xref: /openbsd/gnu/gcc/gcc/function.c (revision 404b540a)
1 /* Expands front end tree to back end RTL for GCC.
2    Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3    1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA.  */
22 
23 /* This file handles the generation of rtl code from tree structure
24    at the level of the function as a whole.
25    It creates the rtl expressions for parameters and auto variables
26    and has full responsibility for allocating stack slots.
27 
28    `expand_function_start' is called at the beginning of a function,
29    before the function body is parsed, and `expand_function_end' is
30    called after parsing the body.
31 
32    Call `assign_stack_local' to allocate a stack slot for a local variable.
33    This is usually done during the RTL generation for the function body,
34    but it can also be done in the reload pass when a pseudo-register does
35    not get a hard register.  */
36 
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "flags.h"
44 #include "except.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "optabs.h"
48 #include "libfuncs.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "output.h"
54 #include "basic-block.h"
55 #include "toplev.h"
56 #include "hashtab.h"
57 #include "ggc.h"
58 #include "tm_p.h"
59 #include "integrate.h"
60 #include "langhooks.h"
61 #include "target.h"
62 #include "cfglayout.h"
63 #include "tree-gimple.h"
64 #include "tree-pass.h"
65 #include "predict.h"
66 #include "vecprim.h"
67 
68 #ifndef LOCAL_ALIGNMENT
69 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
70 #endif
71 
72 #ifndef STACK_ALIGNMENT_NEEDED
73 #define STACK_ALIGNMENT_NEEDED 1
74 #endif
75 
76 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
77 
78 /* Some systems use __main in a way incompatible with its use in gcc; in these
79    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80    give the same symbol without quotes for an alternative entry point.  You
81    must define both, or neither.  */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
85 
86 /* Round a value down to the largest multiple of the required alignment
87    that does not exceed it.  Avoid using division in case the value is
88    negative.  Assume the alignment is a power of two.  */
89 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
90 
91 /* Similar, but round up to the smallest multiple of the alignment
92    that is not less than the value.  */
93 #define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
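
/* Illustrative worked example (not part of the original sources): with a
   power-of-two alignment these macros reduce to simple masking, e.g.

     FLOOR_ROUND (37, 16)  == 32      37 & ~15
     FLOOR_ROUND (-37, 16) == -48     also correct for negative values
     CEIL_ROUND (37, 16)   == 48      (37 + 15) & ~15
     CEIL_ROUND (48, 16)   == 48      already-aligned values are unchanged

   This is why callers below require that the alignment be a power of two.  */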
94 
95 /* Nonzero if function being compiled doesn't contain any calls
96    (ignoring the prologue and epilogue).  This is set prior to
97    local register allocation and is valid for the remaining
98    compiler passes.  */
99 int current_function_is_leaf;
100 
101 /* Nonzero if function being compiled doesn't modify the stack pointer
102    (ignoring the prologue and epilogue).  This is only valid after
103    life_analysis has run.  */
104 int current_function_sp_is_unchanging;
105 
106 /* Nonzero if the function being compiled is a leaf function which only
107    uses leaf registers.  This is valid after reload (specifically after
108    sched2) and is useful only if the port defines LEAF_REGISTERS.  */
109 int current_function_uses_only_leaf_regs;
110 
111 /* Nonzero once virtual register instantiation has been done.
112    assign_stack_local uses frame_pointer_rtx when this is nonzero.
113    calls.c:emit_library_call_value_1 uses it to set up
114    post-instantiation libcalls.  */
115 int virtuals_instantiated;
116 
117 /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
118 static GTY(()) int funcdef_no;
119 
120 /* These variables hold pointers to functions to create and destroy
121    target specific, per-function data structures.  */
122 struct machine_function * (*init_machine_status) (void);
123 
124 /* The currently compiled function.  */
125 struct function *cfun = 0;
126 
127 /* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
128 static VEC(int,heap) *prologue;
129 static VEC(int,heap) *epilogue;
130 
131 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
132    in this function.  */
133 static VEC(int,heap) *sibcall_epilogue;
134 
135 /* In order to evaluate some expressions, such as function calls returning
136    structures in memory, we need to temporarily allocate stack locations.
137    We record each allocated temporary in the following structure.
138 
139    Associated with each temporary slot is a nesting level.  When we pop up
140    one level, all temporaries associated with the previous level are freed.
141    Normally, all temporaries are freed after the execution of the statement
142    in which they were created.  However, if we are inside a ({...}) grouping,
143    the result may be in a temporary and hence must be preserved.  If the
144    result could be in a temporary, we preserve it if we can determine which
145    one it is in.  If we cannot determine which temporary may contain the
146    result, all temporaries are preserved.  A temporary is preserved by
147    pretending it was allocated at the previous nesting level.
148 
149    Automatic variables are also assigned temporary slots, at the nesting
150    level where they are defined.  They are marked as "kept" so that
151    free_temp_slots will not free them.  */
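
/* As an illustration of the ({...}) case described above (the example and the
   names in it are made up, not taken from these sources): when expanding

     int x = ({ struct big b = make_big (); b.field; });

   the value of the statement expression may live in a temporary stack slot,
   and that slot must not be freed at the end of the inner statement, because
   the enclosing initialization still reads from it.  preserve_temp_slots
   arranges this by pretending the slot was allocated at the previous nesting
   level.  */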
152 
153 struct temp_slot GTY(())
154 {
155   /* Points to next temporary slot.  */
156   struct temp_slot *next;
157   /* Points to previous temporary slot.  */
158   struct temp_slot *prev;
159 
160   /* The rtx used to reference the slot.  */
161   rtx slot;
162   /* The rtx used to represent the address if not the address of the
163      slot above.  May be an EXPR_LIST if multiple addresses exist.  */
164   rtx address;
165   /* The alignment (in bits) of the slot.  */
166   unsigned int align;
167   /* The size, in units, of the slot.  */
168   HOST_WIDE_INT size;
169   /* The type of the object in the slot, or zero if it doesn't correspond
170      to a type.  We use this to determine whether a slot can be reused.
171      It can be reused if objects of the type of the new slot will always
172      conflict with objects of the type of the old slot.  */
173   tree type;
174   /* Nonzero if this temporary is currently in use.  */
175   char in_use;
176   /* Nonzero if this temporary has its address taken.  */
177   char addr_taken;
178   /* Nesting level at which this slot is being used.  */
179   int level;
180   /* Nonzero if this should survive a call to free_temp_slots.  */
181   int keep;
182   /* The offset of the slot from the frame_pointer, including extra space
183      for alignment.  This info is for combine_temp_slots.  */
184   HOST_WIDE_INT base_offset;
185   /* The size of the slot, including extra space for alignment.  This
186      info is for combine_temp_slots.  */
187   HOST_WIDE_INT full_size;
188 };
189 
190 /* Forward declarations.  */
191 
192 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
193 				 struct function *);
194 static struct temp_slot *find_temp_slot_from_address (rtx);
195 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
196 static void pad_below (struct args_size *, enum machine_mode, tree);
197 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
198 static int all_blocks (tree, tree *);
199 static tree *get_block_vector (tree, int *);
200 extern tree debug_find_var_in_block_tree (tree, tree);
201 /* We always define `record_insns' even if it's not used so that we
202    can always export `prologue_epilogue_contains'.  */
203 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
204 static int contains (rtx, VEC(int,heap) **);
205 #ifdef HAVE_return
206 static void emit_return_into_block (basic_block, rtx);
207 #endif
208 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
209 static rtx keep_stack_depressed (rtx);
210 #endif
211 static void prepare_function_start (tree);
212 static void do_clobber_return_reg (rtx, void *);
213 static void do_use_return_reg (rtx, void *);
214 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
215 
216 /* Pointer to chain of `struct function' for containing functions.  */
217 struct function *outer_function_chain;
218 
219 /* Given a function decl for a containing function,
220    return the `struct function' for it.  */
221 
222 struct function *
223 find_function_data (tree decl)
224 {
225   struct function *p;
226 
227   for (p = outer_function_chain; p; p = p->outer)
228     if (p->decl == decl)
229       return p;
230 
231   gcc_unreachable ();
232 }
233 
234 /* Save the current context for compilation of a nested function.
235    This is called from language-specific code.  The caller should use
236    the enter_nested langhook to save any language-specific state,
237    since this function knows only about language-independent
238    variables.  */
239 
240 void
241 push_function_context_to (tree context ATTRIBUTE_UNUSED)
242 {
243   struct function *p;
244 
245   if (cfun == 0)
246     init_dummy_function_start ();
247   p = cfun;
248 
249   p->outer = outer_function_chain;
250   outer_function_chain = p;
251 
252   lang_hooks.function.enter_nested (p);
253 
254   cfun = 0;
255 }
256 
257 void
258 push_function_context (void)
259 {
260   push_function_context_to (current_function_decl);
261 }
262 
263 /* Restore the last saved context, at the end of a nested function.
264    This function is called from language-specific code.  */
265 
266 void
267 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
268 {
269   struct function *p = outer_function_chain;
270 
271   cfun = p;
272   outer_function_chain = p->outer;
273 
274   current_function_decl = p->decl;
275 
276   lang_hooks.function.leave_nested (p);
277 
278   /* Reset variables that have known state during rtx generation.  */
279   virtuals_instantiated = 0;
280   generating_concat_p = 1;
281 }
282 
283 void
284 pop_function_context (void)
285 {
286   pop_function_context_from (current_function_decl);
287 }
288 
289 /* Clear out all parts of the state in F that can safely be discarded
290    after the function has been parsed, but not compiled, to let
291    garbage collection reclaim the memory.  */
292 
293 void
294 free_after_parsing (struct function *f)
295 {
296   /* f->expr->forced_labels is used by code generation.  */
297   /* f->emit->regno_reg_rtx is used by code generation.  */
298   /* f->varasm is used by code generation.  */
299   /* f->eh->eh_return_stub_label is used by code generation.  */
300 
301   lang_hooks.function.final (f);
302 }
303 
304 /* Clear out all parts of the state in F that can safely be discarded
305    after the function has been compiled, to let garbage collection
306    reclaim the memory.  */
307 
308 void
309 free_after_compilation (struct function *f)
310 {
311   VEC_free (int, heap, prologue);
312   VEC_free (int, heap, epilogue);
313   VEC_free (int, heap, sibcall_epilogue);
314 
315   f->eh = NULL;
316   f->expr = NULL;
317   f->emit = NULL;
318   f->varasm = NULL;
319   f->machine = NULL;
320   f->cfg = NULL;
321 
322   f->x_avail_temp_slots = NULL;
323   f->x_used_temp_slots = NULL;
324   f->arg_offset_rtx = NULL;
325   f->return_rtx = NULL;
326   f->internal_arg_pointer = NULL;
327   f->x_nonlocal_goto_handler_labels = NULL;
328   f->x_return_label = NULL;
329   f->x_naked_return_label = NULL;
330   f->x_stack_slot_list = NULL;
331   f->x_stack_check_probe_note = NULL;
332   f->x_arg_pointer_save_area = NULL;
333   f->x_parm_birth_insn = NULL;
334   f->epilogue_delay_list = NULL;
335 }
336 
337 /* Allocate fixed slots in the stack frame of the current function.  */
338 
339 /* Return size needed for stack frame based on slots so far allocated in
340    function F.
341    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
342    the caller may have to do that.  */
343 
344 static HOST_WIDE_INT
345 get_func_frame_size (struct function *f)
346 {
347   if (FRAME_GROWS_DOWNWARD)
348     return -f->x_frame_offset;
349   else
350     return f->x_frame_offset;
351 }
352 
353 /* Return size needed for stack frame based on slots so far allocated.
354    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
355    the caller may have to do that.  */
356 
357 HOST_WIDE_INT
358 get_frame_size (void)
359 {
360   return get_func_frame_size (cfun);
361 }
362 
363 /* Issue an error message and return TRUE if frame OFFSET overflows in
364    the signed target pointer arithmetic for function FUNC.  Otherwise
365    return FALSE.  */
366 
367 bool
368 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
369 {
370   unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
371 
372   if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
373 	       /* Leave room for the fixed part of the frame.  */
374 	       - 64 * UNITS_PER_WORD)
375     {
376       error ("%Jtotal size of local objects too large", func);
377       return TRUE;
378     }
379 
380   return FALSE;
381 }
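
/* A worked example (illustrative only): with a 32-bit Pmode and 4-byte words,
   the test above rejects any frame larger than 2^31 - 64*4 bytes, i.e. just
   under 2 GB, keeping the 64-word cushion free for the fixed part of the
   frame.  */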
382 
383 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
384    with machine mode MODE.
385 
386    ALIGN controls the amount of alignment for the address of the slot:
387    0 means according to MODE,
388    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
389    -2 means use BITS_PER_UNIT,
390    positive specifies alignment boundary in bits.
391 
392    We do not round to stack_boundary here.
393 
394    FUNCTION specifies the function to allocate in.  */
395 
396 static rtx
397 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
398 		      struct function *function)
399 {
400   rtx x, addr;
401   int bigend_correction = 0;
402   unsigned int alignment;
403   int frame_off, frame_alignment, frame_phase;
404 
405   if (align == 0)
406     {
407       tree type;
408 
409       if (mode == BLKmode)
410 	alignment = BIGGEST_ALIGNMENT;
411       else
412 	alignment = GET_MODE_ALIGNMENT (mode);
413 
414       /* Allow the target to (possibly) increase the alignment of this
415 	 stack slot.  */
416       type = lang_hooks.types.type_for_mode (mode, 0);
417       if (type)
418 	alignment = LOCAL_ALIGNMENT (type, alignment);
419 
420       alignment /= BITS_PER_UNIT;
421     }
422   else if (align == -1)
423     {
424       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
425       size = CEIL_ROUND (size, alignment);
426     }
427   else if (align == -2)
428     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
429   else
430     alignment = align / BITS_PER_UNIT;
431 
432   if (FRAME_GROWS_DOWNWARD)
433     function->x_frame_offset -= size;
434 
435   /* Ignore any alignment request beyond the preferred stack boundary.  */
436   if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
437     alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
438 
439   if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
440     function->stack_alignment_needed = alignment * BITS_PER_UNIT;
441 
442   /* Calculate how many bytes the start of local variables is off from
443      stack alignment.  */
444   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
445   frame_off = STARTING_FRAME_OFFSET % frame_alignment;
446   frame_phase = frame_off ? frame_alignment - frame_off : 0;
447 
448   /* Round the frame offset to the specified alignment.  The default is
449      to always honor requests to align the stack but a port may choose to
450      do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
451   if (STACK_ALIGNMENT_NEEDED
452       || mode != BLKmode
453       || size != 0)
454     {
455       /*  We must be careful here, since FRAME_OFFSET might be negative and
456 	  division with a negative dividend isn't as well defined as we might
457 	  like.  So we instead assume that ALIGNMENT is a power of two and
458 	  use logical operations which are unambiguous.  */
459       if (FRAME_GROWS_DOWNWARD)
460 	function->x_frame_offset
461 	  = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
462 			  (unsigned HOST_WIDE_INT) alignment)
463 	     + frame_phase);
464       else
465 	function->x_frame_offset
466 	  = (CEIL_ROUND (function->x_frame_offset - frame_phase,
467 			 (unsigned HOST_WIDE_INT) alignment)
468 	     + frame_phase);
469     }
470 
471   /* On a big-endian machine, if we are allocating more space than we will use,
472      use the least significant bytes of those that are allocated.  */
473   if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
474     bigend_correction = size - GET_MODE_SIZE (mode);
475 
476   /* If we have already instantiated virtual registers, return the actual
477      address relative to the frame pointer.  */
478   if (function == cfun && virtuals_instantiated)
479     addr = plus_constant (frame_pointer_rtx,
480 			  trunc_int_for_mode
481 			  (frame_offset + bigend_correction
482 			   + STARTING_FRAME_OFFSET, Pmode));
483   else
484     addr = plus_constant (virtual_stack_vars_rtx,
485 			  trunc_int_for_mode
486 			  (function->x_frame_offset + bigend_correction,
487 			   Pmode));
488 
489   if (!FRAME_GROWS_DOWNWARD)
490     function->x_frame_offset += size;
491 
492   x = gen_rtx_MEM (mode, addr);
493   MEM_NOTRAP_P (x) = 1;
494 
495   function->x_stack_slot_list
496     = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
497 
498   if (frame_offset_overflow (function->x_frame_offset, function->decl))
499     function->x_frame_offset = 0;
500 
501   return x;
502 }
503 
504 /* Wrapper around assign_stack_local_1;  assign a local stack slot for the
505    current function.  */
506 
507 rtx
508 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
509 {
510   return assign_stack_local_1 (mode, size, align, cfun);
511 }
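
/* Illustrative usage sketch (not from the original file): a caller typically
   requests a slot like this, passing 0 so the alignment defaults to the
   natural alignment of the mode:

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Before virtual register instantiation the returned MEM is addressed off
   virtual_stack_vars_rtx; afterwards assign_stack_local_1 uses the frame
   pointer directly.  */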
512 
513 
514 /* Removes temporary slot TEMP from LIST.  */
515 
516 static void
517 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
518 {
519   if (temp->next)
520     temp->next->prev = temp->prev;
521   if (temp->prev)
522     temp->prev->next = temp->next;
523   else
524     *list = temp->next;
525 
526   temp->prev = temp->next = NULL;
527 }
528 
529 /* Inserts temporary slot TEMP to LIST.  */
530 
531 static void
532 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
533 {
534   temp->next = *list;
535   if (*list)
536     (*list)->prev = temp;
537   temp->prev = NULL;
538   *list = temp;
539 }
540 
541 /* Returns the list of used temp slots at LEVEL.  */
542 
543 static struct temp_slot **
544 temp_slots_at_level (int level)
545 {
546   if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
547     {
548       size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
549       temp_slot_p *p;
550 
551       VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
552       p = VEC_address (temp_slot_p, used_temp_slots);
553       memset (&p[old_length], 0,
554 	      sizeof (temp_slot_p) * (level + 1 - old_length));
555     }
556 
557   return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
558 }
559 
560 /* Returns the maximal temporary slot level.  */
561 
562 static int
563 max_slot_level (void)
564 {
565   if (!used_temp_slots)
566     return -1;
567 
568   return VEC_length (temp_slot_p, used_temp_slots) - 1;
569 }
570 
571 /* Moves temporary slot TEMP to LEVEL.  */
572 
573 static void
574 move_slot_to_level (struct temp_slot *temp, int level)
575 {
576   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
577   insert_slot_to_list (temp, temp_slots_at_level (level));
578   temp->level = level;
579 }
580 
581 /* Make temporary slot TEMP available.  */
582 
583 static void
584 make_slot_available (struct temp_slot *temp)
585 {
586   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
587   insert_slot_to_list (temp, &avail_temp_slots);
588   temp->in_use = 0;
589   temp->level = -1;
590 }
591 
592 /* Allocate a temporary stack slot and record it for possible later
593    reuse.
594 
595    MODE is the machine mode to be given to the returned rtx.
596 
597    SIZE is the size in units of the space required.  We do no rounding here
598    since assign_stack_local will do any required rounding.
599 
600    KEEP is 1 if this slot is to be retained after a call to
601    free_temp_slots.  Automatic variables for a block are allocated
602    with this flag.  KEEP values of 2 or 3 were needed respectively
603    for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
604    or for SAVE_EXPRs, but they are now unused.
605 
606    TYPE is the type that will be used for the stack slot.  */
607 
608 rtx
609 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
610 			    int keep, tree type)
611 {
612   unsigned int align;
613   struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
614   rtx slot;
615 
616   /* If SIZE is -1 it means that somebody tried to allocate a temporary
617      of a variable size.  */
618   gcc_assert (size != -1);
619 
620   /* These are now unused.  */
621   gcc_assert (keep <= 1);
622 
623   if (mode == BLKmode)
624     align = BIGGEST_ALIGNMENT;
625   else
626     align = GET_MODE_ALIGNMENT (mode);
627 
628   if (! type)
629     type = lang_hooks.types.type_for_mode (mode, 0);
630 
631   if (type)
632     align = LOCAL_ALIGNMENT (type, align);
633 
634   /* Try to find an available, already-allocated temporary of the proper
635      mode which meets the size and alignment requirements.  Choose the
636      smallest one with the closest alignment.
637 
638      If assign_stack_temp is called outside of the tree->rtl expansion,
639      we cannot reuse the stack slots (that may still refer to
640      VIRTUAL_STACK_VARS_REGNUM).  */
641   if (!virtuals_instantiated)
642     {
643       for (p = avail_temp_slots; p; p = p->next)
644 	{
645 	  if (p->align >= align && p->size >= size
646 	      && GET_MODE (p->slot) == mode
647 	      && objects_must_conflict_p (p->type, type)
648 	      && (best_p == 0 || best_p->size > p->size
649 		  || (best_p->size == p->size && best_p->align > p->align)))
650 	    {
651 	      if (p->align == align && p->size == size)
652 		{
653 		  selected = p;
654 		  cut_slot_from_list (selected, &avail_temp_slots);
655 		  best_p = 0;
656 		  break;
657 		}
658 	      best_p = p;
659 	    }
660 	}
661     }
662 
663   /* Make our best, if any, the one to use.  */
664   if (best_p)
665     {
666       selected = best_p;
667       cut_slot_from_list (selected, &avail_temp_slots);
668 
669       /* If there are enough aligned bytes left over, make them into a new
670 	 temp_slot so that the extra bytes don't get wasted.  Do this only
671 	 for BLKmode slots, so that we can be sure of the alignment.  */
672       if (GET_MODE (best_p->slot) == BLKmode)
673 	{
674 	  int alignment = best_p->align / BITS_PER_UNIT;
675 	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
676 
677 	  if (best_p->size - rounded_size >= alignment)
678 	    {
679 	      p = ggc_alloc (sizeof (struct temp_slot));
680 	      p->in_use = p->addr_taken = 0;
681 	      p->size = best_p->size - rounded_size;
682 	      p->base_offset = best_p->base_offset + rounded_size;
683 	      p->full_size = best_p->full_size - rounded_size;
684 	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
685 	      p->align = best_p->align;
686 	      p->address = 0;
687 	      p->type = best_p->type;
688 	      insert_slot_to_list (p, &avail_temp_slots);
689 
690 	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
691 						   stack_slot_list);
692 
693 	      best_p->size = rounded_size;
694 	      best_p->full_size = rounded_size;
695 	    }
696 	}
697     }
698 
699   /* If we still didn't find one, make a new temporary.  */
700   if (selected == 0)
701     {
702       HOST_WIDE_INT frame_offset_old = frame_offset;
703 
704       p = ggc_alloc (sizeof (struct temp_slot));
705 
706       /* We are passing an explicit alignment request to assign_stack_local.
707 	 One side effect of that is assign_stack_local will not round SIZE
708 	 to ensure the frame offset remains suitably aligned.
709 
710 	 So for requests which depended on the rounding of SIZE, we go ahead
711 	 and round it now.  We also make sure ALIGNMENT is at least
712 	 BIGGEST_ALIGNMENT.  */
713       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
714       p->slot = assign_stack_local (mode,
715 				    (mode == BLKmode
716 				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
717 				     : size),
718 				    align);
719 
720       p->align = align;
721 
722       /* The following slot size computation is necessary because we don't
723 	 know the actual size of the temporary slot until assign_stack_local
724 	 has performed all the frame alignment and size rounding for the
725 	 requested temporary.  Note that extra space added for alignment
726 	 can be either above or below this stack slot depending on which
727 	 way the frame grows.  We include the extra space if and only if it
728 	 is above this slot.  */
729       if (FRAME_GROWS_DOWNWARD)
730 	p->size = frame_offset_old - frame_offset;
731       else
732 	p->size = size;
733 
734       /* Now define the fields used by combine_temp_slots.  */
735       if (FRAME_GROWS_DOWNWARD)
736 	{
737 	  p->base_offset = frame_offset;
738 	  p->full_size = frame_offset_old - frame_offset;
739 	}
740       else
741 	{
742 	  p->base_offset = frame_offset_old;
743 	  p->full_size = frame_offset - frame_offset_old;
744 	}
745       p->address = 0;
746 
747       selected = p;
748     }
749 
750   p = selected;
751   p->in_use = 1;
752   p->addr_taken = 0;
753   p->type = type;
754   p->level = temp_slot_level;
755   p->keep = keep;
756 
757   pp = temp_slots_at_level (p->level);
758   insert_slot_to_list (p, pp);
759 
760   /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
761   slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
762   stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
763 
764   /* If we know the alias set for the memory that will be used, use
765      it.  If there's no TYPE, then we don't know anything about the
766      alias set for the memory.  */
767   set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
768   set_mem_align (slot, align);
769 
770   /* If a type is specified, set the relevant flags.  */
771   if (type != 0)
772     {
773       MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
774       MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
775     }
776   MEM_NOTRAP_P (slot) = 1;
777 
778   return slot;
779 }
780 
781 /* Allocate a temporary stack slot and record it for possible later
782    reuse.  The first three arguments are as in the preceding function.  */
783 
784 rtx
785 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
786 {
787   return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
788 }
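
/* Illustrative usage sketch (not part of the original sources): expansion
   code commonly grabs a scratch slot for a value of mode MODE with

     rtx tmp = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);

   With KEEP == 0 the slot is returned to avail_temp_slots by the next call
   to free_temp_slots and may then be reused for a later temporary.  */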
789 
790 /* Assign a temporary.
791    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
792    and it should be used in error messages.  In either case, we
793    allocate space of the given type.
794    KEEP is as for assign_stack_temp.
795    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
796    it is 0 if a register is OK.
797    DONT_PROMOTE is 1 if we should not promote values in register
798    to wider modes.  */
799 
800 rtx
801 assign_temp (tree type_or_decl, int keep, int memory_required,
802 	     int dont_promote ATTRIBUTE_UNUSED)
803 {
804   tree type, decl;
805   enum machine_mode mode;
806 #ifdef PROMOTE_MODE
807   int unsignedp;
808 #endif
809 
810   if (DECL_P (type_or_decl))
811     decl = type_or_decl, type = TREE_TYPE (decl);
812   else
813     decl = NULL, type = type_or_decl;
814 
815   mode = TYPE_MODE (type);
816 #ifdef PROMOTE_MODE
817   unsignedp = TYPE_UNSIGNED (type);
818 #endif
819 
820   if (mode == BLKmode || memory_required)
821     {
822       HOST_WIDE_INT size = int_size_in_bytes (type);
823       rtx tmp;
824 
825       /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
826 	 problems with allocating the stack space.  */
827       if (size == 0)
828 	size = 1;
829 
830       /* Unfortunately, we don't yet know how to allocate variable-sized
831 	 temporaries.  However, sometimes we can find a fixed upper limit on
832 	 the size, so try that instead.  */
833       else if (size == -1)
834 	size = max_int_size_in_bytes (type);
835 
836       /* The size of the temporary may be too large to fit into an integer.  */
837       /* ??? Not sure this should happen except for user silliness, so limit
838 	 this to things that aren't compiler-generated temporaries.  The
839 	 rest of the time we'll die in assign_stack_temp_for_type.  */
840       if (decl && size == -1
841 	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
842 	{
843 	  error ("size of variable %q+D is too large", decl);
844 	  size = 1;
845 	}
846 
847       tmp = assign_stack_temp_for_type (mode, size, keep, type);
848       return tmp;
849     }
850 
851 #ifdef PROMOTE_MODE
852   if (! dont_promote)
853     mode = promote_mode (type, mode, &unsignedp, 0);
854 #endif
855 
856   return gen_reg_rtx (mode);
857 }
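
/* Illustrative sketch (not from the original sources): for a small scalar
   type with MEMORY_REQUIRED == 0 this simply hands back a fresh pseudo,

     rtx r = assign_temp (integer_type_node, 0, 0, 0);

   whereas a BLKmode aggregate, or any request with MEMORY_REQUIRED == 1,
   falls through to assign_stack_temp_for_type and gets stack memory.  */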
858 
859 /* Combine temporary stack slots which are adjacent on the stack.
860 
861    This allows for better use of already allocated stack space.  This is only
862    done for BLKmode slots because we can be sure that we won't have alignment
863    problems in this case.  */
864 
865 static void
866 combine_temp_slots (void)
867 {
868   struct temp_slot *p, *q, *next, *next_q;
869   int num_slots;
870 
871   /* If strict aliasing is in effect, we can't combine slots, because the
872      information about which slot is in which alias set would be lost.  */
873   if (flag_strict_aliasing)
874     return;
875 
876   /* If there are a lot of temp slots, don't do anything unless
877      we are compiling with high levels of optimization.  */
878   if (! flag_expensive_optimizations)
879     for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
880       if (num_slots > 100 || (num_slots > 10 && optimize == 0))
881 	return;
882 
883   for (p = avail_temp_slots; p; p = next)
884     {
885       int delete_p = 0;
886 
887       next = p->next;
888 
889       if (GET_MODE (p->slot) != BLKmode)
890 	continue;
891 
892       for (q = p->next; q; q = next_q)
893 	{
894        	  int delete_q = 0;
895 
896 	  next_q = q->next;
897 
898 	  if (GET_MODE (q->slot) != BLKmode)
899 	    continue;
900 
901 	  if (p->base_offset + p->full_size == q->base_offset)
902 	    {
903 	      /* Q comes after P; combine Q into P.  */
904 	      p->size += q->size;
905 	      p->full_size += q->full_size;
906 	      delete_q = 1;
907 	    }
908 	  else if (q->base_offset + q->full_size == p->base_offset)
909 	    {
910 	      /* P comes after Q; combine P into Q.  */
911 	      q->size += p->size;
912 	      q->full_size += p->full_size;
913 	      delete_p = 1;
914 	      break;
915 	    }
916 	  if (delete_q)
917 	    cut_slot_from_list (q, &avail_temp_slots);
918 	}
919 
920       /* Either delete P or advance past it.  */
921       if (delete_p)
922 	cut_slot_from_list (p, &avail_temp_slots);
923     }
924 }
925 
926 /* Find the temp slot corresponding to the object at address X.  */
927 
928 static struct temp_slot *
929 find_temp_slot_from_address (rtx x)
930 {
931   struct temp_slot *p;
932   rtx next;
933   int i;
934 
935   for (i = max_slot_level (); i >= 0; i--)
936     for (p = *temp_slots_at_level (i); p; p = p->next)
937       {
938 	if (XEXP (p->slot, 0) == x
939 	    || p->address == x
940 	    || (GET_CODE (x) == PLUS
941 		&& XEXP (x, 0) == virtual_stack_vars_rtx
942 		&& GET_CODE (XEXP (x, 1)) == CONST_INT
943 		&& INTVAL (XEXP (x, 1)) >= p->base_offset
944 		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
945 	  return p;
946 
947 	else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
948 	  for (next = p->address; next; next = XEXP (next, 1))
949 	    if (XEXP (next, 0) == x)
950 	      return p;
951       }
952 
953   /* If we have a sum involving a register, see if it points to a temp
954      slot.  */
955   if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
956       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
957     return p;
958   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
959 	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
960     return p;
961 
962   return 0;
963 }
964 
965 /* Indicate that NEW is an alternate way of referring to the temp slot
966    that previously was known by OLD.  */
967 
968 void
969 update_temp_slot_address (rtx old, rtx new)
970 {
971   struct temp_slot *p;
972 
973   if (rtx_equal_p (old, new))
974     return;
975 
976   p = find_temp_slot_from_address (old);
977 
978   /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
979      is a register, see if one operand of the PLUS is a temporary
980      location.  If so, NEW points into it.  Otherwise, if both OLD and
981      NEW are a PLUS and there is a register in common between them,
982      try a recursive call on those values.  */
983   if (p == 0)
984     {
985       if (GET_CODE (old) != PLUS)
986 	return;
987 
988       if (REG_P (new))
989 	{
990 	  update_temp_slot_address (XEXP (old, 0), new);
991 	  update_temp_slot_address (XEXP (old, 1), new);
992 	  return;
993 	}
994       else if (GET_CODE (new) != PLUS)
995 	return;
996 
997       if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
998 	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
999       else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1000 	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1001       else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1002 	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1003       else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1004 	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1005 
1006       return;
1007     }
1008 
1009   /* Otherwise add an alias for the temp's address.  */
1010   else if (p->address == 0)
1011     p->address = new;
1012   else
1013     {
1014       if (GET_CODE (p->address) != EXPR_LIST)
1015 	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1016 
1017       p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1018     }
1019 }
1020 
1021 /* If X could be a reference to a temporary slot, mark the fact that its
1022    address was taken.  */
1023 
1024 void
1025 mark_temp_addr_taken (rtx x)
1026 {
1027   struct temp_slot *p;
1028 
1029   if (x == 0)
1030     return;
1031 
1032   /* If X is not in memory or is at a constant address, it cannot be in
1033      a temporary slot.  */
1034   if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1035     return;
1036 
1037   p = find_temp_slot_from_address (XEXP (x, 0));
1038   if (p != 0)
1039     p->addr_taken = 1;
1040 }
1041 
1042 /* If X could be a reference to a temporary slot, mark that slot as
1043    belonging to the level one higher than the current level.  If X
1044    matched one of our slots, just mark that one.  Otherwise, we can't
1045    easily predict which it is, so upgrade all of them.  Kept slots
1046    need not be touched.
1047 
1048    This is called when an ({...}) construct occurs and a statement
1049    returns a value in memory.  */
1050 
1051 void
1052 preserve_temp_slots (rtx x)
1053 {
1054   struct temp_slot *p = 0, *next;
1055 
1056   /* If there is no result, we still might have some objects whose addresses
1057      were taken, so we need to make sure they stay around.  */
1058   if (x == 0)
1059     {
1060       for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1061 	{
1062 	  next = p->next;
1063 
1064 	  if (p->addr_taken)
1065 	    move_slot_to_level (p, temp_slot_level - 1);
1066 	}
1067 
1068       return;
1069     }
1070 
1071   /* If X is a register that is being used as a pointer, see if we have
1072      a temporary slot we know it points to.  To be consistent with
1073      the code below, we really should preserve all non-kept slots
1074      if we can't find a match, but that seems to be much too costly.  */
1075   if (REG_P (x) && REG_POINTER (x))
1076     p = find_temp_slot_from_address (x);
1077 
1078   /* If X is not in memory or is at a constant address, it cannot be in
1079      a temporary slot, but it can contain something whose address was
1080      taken.  */
1081   if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1082     {
1083       for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1084 	{
1085 	  next = p->next;
1086 
1087 	  if (p->addr_taken)
1088 	    move_slot_to_level (p, temp_slot_level - 1);
1089 	}
1090 
1091       return;
1092     }
1093 
1094   /* First see if we can find a match.  */
1095   if (p == 0)
1096     p = find_temp_slot_from_address (XEXP (x, 0));
1097 
1098   if (p != 0)
1099     {
1100       /* Move everything at our level whose address was taken to our new
1101 	 level in case we used its address.  */
1102       struct temp_slot *q;
1103 
1104       if (p->level == temp_slot_level)
1105 	{
1106 	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1107 	    {
1108 	      next = q->next;
1109 
1110 	      if (p != q && q->addr_taken)
1111 		move_slot_to_level (q, temp_slot_level - 1);
1112 	    }
1113 
1114 	  move_slot_to_level (p, temp_slot_level - 1);
1115 	  p->addr_taken = 0;
1116 	}
1117       return;
1118     }
1119 
1120   /* Otherwise, preserve all non-kept slots at this level.  */
1121   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1122     {
1123       next = p->next;
1124 
1125       if (!p->keep)
1126 	move_slot_to_level (p, temp_slot_level - 1);
1127     }
1128 }
1129 
1130 /* Free all temporaries used so far.  This is normally called at the
1131    end of generating code for a statement.  */
1132 
1133 void
1134 free_temp_slots (void)
1135 {
1136   struct temp_slot *p, *next;
1137 
1138   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1139     {
1140       next = p->next;
1141 
1142       if (!p->keep)
1143 	make_slot_available (p);
1144     }
1145 
1146   combine_temp_slots ();
1147 }
1148 
1149 /* Push deeper into the nesting level for stack temporaries.  */
1150 
1151 void
1152 push_temp_slots (void)
1153 {
1154   temp_slot_level++;
1155 }
1156 
1157 /* Pop a temporary nesting level.  All slots in use in the current level
1158    are freed.  */
1159 
1160 void
1161 pop_temp_slots (void)
1162 {
1163   struct temp_slot *p, *next;
1164 
1165   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1166     {
1167       next = p->next;
1168       make_slot_available (p);
1169     }
1170 
1171   combine_temp_slots ();
1172 
1173   temp_slot_level--;
1174 }
1175 
1176 /* Initialize temporary slots.  */
1177 
1178 void
1179 init_temp_slots (void)
1180 {
1181   /* We have not allocated any temporaries yet.  */
1182   avail_temp_slots = 0;
1183   used_temp_slots = 0;
1184   temp_slot_level = 0;
1185 }
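
/* A sketch of the usual nesting discipline, for illustration only (the real
   callers live in the statement-expansion code):

     push_temp_slots ();
     ... expand one statement, allocating temporaries as needed ...
     preserve_temp_slots (result);
     free_temp_slots ();
     pop_temp_slots ();

   Each push/pop pair brackets one nesting level; free_temp_slots releases
   every non-"kept" slot at the current level, and preserve_temp_slots first
   rescues any slot the result may still live in.  */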
1186 
1187 /* These routines are responsible for converting virtual register references
1188    to the actual hard register references once RTL generation is complete.
1189 
1190    The following four variables are used for communication between the
1191    routines.  They contain the offsets of the virtual registers from their
1192    respective hard registers.  */
1193 
1194 static int in_arg_offset;
1195 static int var_offset;
1196 static int dynamic_offset;
1197 static int out_arg_offset;
1198 static int cfa_offset;
1199 
1200 /* In most machines, the stack pointer register is equivalent to the bottom
1201    of the stack.  */
1202 
1203 #ifndef STACK_POINTER_OFFSET
1204 #define STACK_POINTER_OFFSET	0
1205 #endif
1206 
1207 /* If not defined, pick an appropriate default for the offset of dynamically
1208    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1209    REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
1210 
1211 #ifndef STACK_DYNAMIC_OFFSET
1212 
1213 /* The bottom of the stack points to the actual arguments.  If
1214    REG_PARM_STACK_SPACE is defined, this includes the space for the register
1215    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1216    stack space for register parameters is not pushed by the caller, but
1217    is rather part of the fixed stack areas and hence not included in
1218    `current_function_outgoing_args_size'.  Nevertheless, we must allow
1219    for it when allocating stack dynamic objects.  */
1220 
1221 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1222 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1223 ((ACCUMULATE_OUTGOING_ARGS						      \
1224   ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1225  + (STACK_POINTER_OFFSET))						      \
1226 
1227 #else
1228 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1229 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
1230  + (STACK_POINTER_OFFSET))
1231 #endif
1232 #endif
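
/* For illustration (assumptions noted): on a target that accumulates
   outgoing arguments and does not define REG_PARM_STACK_SPACE, the default
   above makes dynamically allocated memory start at

     current_function_outgoing_args_size + STACK_POINTER_OFFSET

   bytes above the stack pointer, i.e. just past the outgoing-argument area.  */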
1233 
1234 
1235 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1236    is a virtual register, return the equivalent hard register and set the
1237    offset indirectly through the pointer.  Otherwise, return 0.  */
1238 
1239 static rtx
1240 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1241 {
1242   rtx new;
1243   HOST_WIDE_INT offset;
1244 
1245   if (x == virtual_incoming_args_rtx)
1246     new = arg_pointer_rtx, offset = in_arg_offset;
1247   else if (x == virtual_stack_vars_rtx)
1248     new = frame_pointer_rtx, offset = var_offset;
1249   else if (x == virtual_stack_dynamic_rtx)
1250     new = stack_pointer_rtx, offset = dynamic_offset;
1251   else if (x == virtual_outgoing_args_rtx)
1252     new = stack_pointer_rtx, offset = out_arg_offset;
1253   else if (x == virtual_cfa_rtx)
1254     {
1255 #ifdef FRAME_POINTER_CFA_OFFSET
1256       new = frame_pointer_rtx;
1257 #else
1258       new = arg_pointer_rtx;
1259 #endif
1260       offset = cfa_offset;
1261     }
1262   else
1263     return NULL_RTX;
1264 
1265   *poffset = offset;
1266   return new;
1267 }
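
/* For illustration (not from the original sources): after the offsets are
   computed in instantiate_virtual_regs, a frame reference such as

     (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))

   is rewritten through the mapping above into

     (mem:SI (plus:SI (reg:SI frame-pointer) (const_int 8+var_offset)))

   with the sum folded into a single constant; the other virtual registers
   map onto the argument and stack pointers in the same way.  */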
1268 
1269 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1270    Instantiate any virtual registers present inside of *LOC.  The expression
1271    is simplified, as much as possible, but is not to be considered "valid"
1272    in any sense implied by the target.  If any change is made, set CHANGED
1273    to true.  */
1274 
1275 static int
1276 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1277 {
1278   HOST_WIDE_INT offset;
1279   bool *changed = (bool *) data;
1280   rtx x, new;
1281 
1282   x = *loc;
1283   if (x == 0)
1284     return 0;
1285 
1286   switch (GET_CODE (x))
1287     {
1288     case REG:
1289       new = instantiate_new_reg (x, &offset);
1290       if (new)
1291 	{
1292 	  *loc = plus_constant (new, offset);
1293 	  if (changed)
1294 	    *changed = true;
1295 	}
1296       return -1;
1297 
1298     case PLUS:
1299       new = instantiate_new_reg (XEXP (x, 0), &offset);
1300       if (new)
1301 	{
1302 	  new = plus_constant (new, offset);
1303 	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1304 	  if (changed)
1305 	    *changed = true;
1306 	  return -1;
1307 	}
1308 
1309       /* FIXME -- from old code */
1310 	  /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1311 	     we can commute the PLUS and SUBREG because pointers into the
1312 	     frame are well-behaved.  */
1313       break;
1314 
1315     default:
1316       break;
1317     }
1318 
1319   return 0;
1320 }
1321 
1322 /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
1323    matches the predicate for insn CODE operand OPERAND.  */
1324 
1325 static int
1326 safe_insn_predicate (int code, int operand, rtx x)
1327 {
1328   const struct insn_operand_data *op_data;
1329 
1330   if (code < 0)
1331     return true;
1332 
1333   op_data = &insn_data[code].operand[operand];
1334   if (op_data->predicate == NULL)
1335     return true;
1336 
1337   return op_data->predicate (x, op_data->mode);
1338 }
1339 
1340 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1341    registers present inside of insn.  The result will be a valid insn.  */
1342 
1343 static void
1344 instantiate_virtual_regs_in_insn (rtx insn)
1345 {
1346   HOST_WIDE_INT offset;
1347   int insn_code, i;
1348   bool any_change = false;
1349   rtx set, new, x, seq;
1350 
1351   /* There are some special cases to be handled first.  */
1352   set = single_set (insn);
1353   if (set)
1354     {
1355       /* We're allowed to assign to a virtual register.  This is interpreted
1356 	 to mean that the underlying register gets assigned the inverse
1357 	 transformation.  This is used, for example, in the handling of
1358 	 non-local gotos.  */
1359       new = instantiate_new_reg (SET_DEST (set), &offset);
1360       if (new)
1361 	{
1362 	  start_sequence ();
1363 
1364 	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1365 	  x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1366 				   GEN_INT (-offset));
1367 	  x = force_operand (x, new);
1368 	  if (x != new)
1369 	    emit_move_insn (new, x);
1370 
1371 	  seq = get_insns ();
1372 	  end_sequence ();
1373 
1374 	  emit_insn_before (seq, insn);
1375 	  delete_insn (insn);
1376 	  return;
1377 	}
1378 
1379       /* Handle a straight copy from a virtual register by generating a
1380 	 new add insn.  The difference between this and falling through
1381 	 to the generic case is avoiding a new pseudo and eliminating a
1382 	 move insn in the initial rtl stream.  */
1383       new = instantiate_new_reg (SET_SRC (set), &offset);
1384       if (new && offset != 0
1385 	  && REG_P (SET_DEST (set))
1386 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1387 	{
1388 	  start_sequence ();
1389 
1390 	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1391 				   new, GEN_INT (offset), SET_DEST (set),
1392 				   1, OPTAB_LIB_WIDEN);
1393 	  if (x != SET_DEST (set))
1394 	    emit_move_insn (SET_DEST (set), x);
1395 
1396 	  seq = get_insns ();
1397 	  end_sequence ();
1398 
1399 	  emit_insn_before (seq, insn);
1400 	  delete_insn (insn);
1401 	  return;
1402 	}
1403 
1404       extract_insn (insn);
1405       insn_code = INSN_CODE (insn);
1406 
1407       /* Handle a plus involving a virtual register by determining if the
1408 	 operands remain valid if they're modified in place.  */
1409       if (GET_CODE (SET_SRC (set)) == PLUS
1410 	  && recog_data.n_operands >= 3
1411 	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1412 	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1413 	  && GET_CODE (recog_data.operand[2]) == CONST_INT
1414 	  && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1415 	{
1416 	  offset += INTVAL (recog_data.operand[2]);
1417 
1418 	  /* If the sum is zero, then replace with a plain move.  */
1419 	  if (offset == 0
1420 	      && REG_P (SET_DEST (set))
1421 	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1422 	    {
1423 	      start_sequence ();
1424 	      emit_move_insn (SET_DEST (set), new);
1425 	      seq = get_insns ();
1426 	      end_sequence ();
1427 
1428 	      emit_insn_before (seq, insn);
1429 	      delete_insn (insn);
1430 	      return;
1431 	    }
1432 
1433 	  x = gen_int_mode (offset, recog_data.operand_mode[2]);
1434 
1435 	  /* Using validate_change and apply_change_group here leaves
1436 	     recog_data in an invalid state.  Since we know exactly what
1437 	     we want to check, do those two by hand.  */
1438 	  if (safe_insn_predicate (insn_code, 1, new)
1439 	      && safe_insn_predicate (insn_code, 2, x))
1440 	    {
1441 	      *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1442 	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1443 	      any_change = true;
1444 
1445 	      /* Fall through into the regular operand fixup loop in
1446 		 order to take care of operands other than 1 and 2.  */
1447 	    }
1448 	}
1449     }
1450   else
1451     {
1452       extract_insn (insn);
1453       insn_code = INSN_CODE (insn);
1454     }
1455 
1456   /* In the general case, we expect virtual registers to appear only in
1457      operands, and then only as either bare registers or inside memories.  */
1458   for (i = 0; i < recog_data.n_operands; ++i)
1459     {
1460       x = recog_data.operand[i];
1461       switch (GET_CODE (x))
1462 	{
1463 	case MEM:
1464 	  {
1465 	    rtx addr = XEXP (x, 0);
1466 	    bool changed = false;
1467 
1468 	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1469 	    if (!changed)
1470 	      continue;
1471 
1472 	    start_sequence ();
1473 	    x = replace_equiv_address (x, addr);
1474 	    seq = get_insns ();
1475 	    end_sequence ();
1476 	    if (seq)
1477 	      emit_insn_before (seq, insn);
1478 	  }
1479 	  break;
1480 
1481 	case REG:
1482 	  new = instantiate_new_reg (x, &offset);
1483 	  if (new == NULL)
1484 	    continue;
1485 	  if (offset == 0)
1486 	    x = new;
1487 	  else
1488 	    {
1489 	      start_sequence ();
1490 
1491 	      /* Careful, special mode predicates may have stuff in
1492 		 insn_data[insn_code].operand[i].mode that isn't useful
1493 		 to us for computing a new value.  */
1494 	      /* ??? Recognize address_operand and/or "p" constraints
1495 		 to see if (plus new offset) is a valid address before we put
1496 		 this through expand_simple_binop.  */
1497 	      x = expand_simple_binop (GET_MODE (x), PLUS, new,
1498 				       GEN_INT (offset), NULL_RTX,
1499 				       1, OPTAB_LIB_WIDEN);
1500 	      seq = get_insns ();
1501 	      end_sequence ();
1502 	      emit_insn_before (seq, insn);
1503 	    }
1504 	  break;
1505 
1506 	case SUBREG:
1507 	  new = instantiate_new_reg (SUBREG_REG (x), &offset);
1508 	  if (new == NULL)
1509 	    continue;
1510 	  if (offset != 0)
1511 	    {
1512 	      start_sequence ();
1513 	      new = expand_simple_binop (GET_MODE (new), PLUS, new,
1514 					 GEN_INT (offset), NULL_RTX,
1515 					 1, OPTAB_LIB_WIDEN);
1516 	      seq = get_insns ();
1517 	      end_sequence ();
1518 	      emit_insn_before (seq, insn);
1519 	    }
1520 	  x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1521 				   GET_MODE (new), SUBREG_BYTE (x));
1522 	  break;
1523 
1524 	default:
1525 	  continue;
1526 	}
1527 
1528       /* At this point, X contains the new value for the operand.
1529 	 Validate the new value vs the insn predicate.  Note that
1530 	 asm insns will have insn_code -1 here.  */
1531       if (!safe_insn_predicate (insn_code, i, x))
1532 	{
1533 	  start_sequence ();
1534 	  x = force_reg (insn_data[insn_code].operand[i].mode, x);
1535 	  seq = get_insns ();
1536 	  end_sequence ();
1537 	  if (seq)
1538 	    emit_insn_before (seq, insn);
1539 	}
1540 
1541       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1542       any_change = true;
1543     }
1544 
1545   if (any_change)
1546     {
1547       /* Propagate operand changes into the duplicates.  */
1548       for (i = 0; i < recog_data.n_dups; ++i)
1549 	*recog_data.dup_loc[i]
1550 	  = recog_data.operand[(unsigned)recog_data.dup_num[i]];
1551 
1552       /* Force re-recognition of the instruction for validation.  */
1553       INSN_CODE (insn) = -1;
1554     }
1555 
1556   if (asm_noperands (PATTERN (insn)) >= 0)
1557     {
1558       if (!check_asm_operands (PATTERN (insn)))
1559 	{
1560 	  error_for_asm (insn, "impossible constraint in %<asm%>");
1561 	  delete_insn (insn);
1562 	}
1563     }
1564   else
1565     {
1566       if (recog_memoized (insn) < 0)
1567 	fatal_insn_not_found (insn);
1568     }
1569 }
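
/* A hedged illustration of the straight-copy case handled above (example
   only, with a made-up pseudo register number): an insn such as

     (set (reg:SI 70) (reg:SI virtual-incoming-args))

   is not kept as a move of a soon-to-be-meaningless register; it is replaced
   by an addition computed into the same destination, roughly

     (set (reg:SI 70) (plus:SI (reg:SI arg-pointer) (const_int <in_arg_offset>)))

   emitted through expand_simple_binop so the target can supply a valid add
   or load-address pattern.  */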
1570 
1571 /* Subroutine of instantiate_decls.  Given RTL representing a decl,
1572    do any instantiation required.  */
1573 
1574 static void
1575 instantiate_decl (rtx x)
1576 {
1577   rtx addr;
1578 
1579   if (x == 0)
1580     return;
1581 
1582   /* If this is a CONCAT, recurse for the pieces.  */
1583   if (GET_CODE (x) == CONCAT)
1584     {
1585       instantiate_decl (XEXP (x, 0));
1586       instantiate_decl (XEXP (x, 1));
1587       return;
1588     }
1589 
1590   /* If this is not a MEM, no need to do anything.  Similarly if the
1591      address is a constant or a register that is not a virtual register.  */
1592   if (!MEM_P (x))
1593     return;
1594 
1595   addr = XEXP (x, 0);
1596   if (CONSTANT_P (addr)
1597       || (REG_P (addr)
1598 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1599 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1600     return;
1601 
1602   for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1603 }
1604 
1605 /* Helper for instantiate_decls called via walk_tree: Process all decls
1606    in the given DECL_VALUE_EXPR.  */
1607 
1608 static tree
1609 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1610 {
1611   tree t = *tp;
1612   if (! EXPR_P (t))
1613     {
1614       *walk_subtrees = 0;
1615       if (DECL_P (t) && DECL_RTL_SET_P (t))
1616 	instantiate_decl (DECL_RTL (t));
1617     }
1618   return NULL;
1619 }
1620 
1621 /* Subroutine of instantiate_decls: Process all decls in the given
1622    BLOCK node and all its subblocks.  */
1623 
1624 static void
1625 instantiate_decls_1 (tree let)
1626 {
1627   tree t;
1628 
1629   for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1630     {
1631       if (DECL_RTL_SET_P (t))
1632 	instantiate_decl (DECL_RTL (t));
1633       if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1634 	{
1635 	  tree v = DECL_VALUE_EXPR (t);
1636 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1637 	}
1638     }
1639 
1640   /* Process all subblocks.  */
1641   for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1642     instantiate_decls_1 (t);
1643 }
1644 
1645 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1646    all virtual registers in their DECL_RTL's.  */
1647 
1648 static void
1649 instantiate_decls (tree fndecl)
1650 {
1651   tree decl;
1652 
1653   /* Process all parameters of the function.  */
1654   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1655     {
1656       instantiate_decl (DECL_RTL (decl));
1657       instantiate_decl (DECL_INCOMING_RTL (decl));
1658       if (DECL_HAS_VALUE_EXPR_P (decl))
1659 	{
1660 	  tree v = DECL_VALUE_EXPR (decl);
1661 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1662 	}
1663     }
1664 
1665   /* Now process all variables defined in the function or its subblocks.  */
1666   instantiate_decls_1 (DECL_INITIAL (fndecl));
1667 }
1668 
1669 /* Pass through the INSNS of function FNDECL and convert virtual register
1670    references to hard register references.  */
1671 
1672 static unsigned int
1673 instantiate_virtual_regs (void)
1674 {
1675   rtx insn;
1676 
1677   /* Compute the offsets to use for this function.  */
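  /* Each offset below is what gets added when the matching virtual
     register (incoming args, stack variables, dynamic stack area,
     outgoing args, CFA) is replaced by its hard register; the actual
     values are entirely target-dependent.  */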
1678   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1679   var_offset = STARTING_FRAME_OFFSET;
1680   dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1681   out_arg_offset = STACK_POINTER_OFFSET;
1682 #ifdef FRAME_POINTER_CFA_OFFSET
1683   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1684 #else
1685   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1686 #endif
1687 
1688   /* Initialize recognition, indicating that volatile is OK.  */
1689   init_recog ();
1690 
1691   /* Scan through all the insns, instantiating every virtual register still
1692      present.  */
1693   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1694     if (INSN_P (insn))
1695       {
1696 	/* These patterns in the instruction stream can never be recognized.
1697 	   Fortunately, they shouldn't contain virtual registers either.  */
1698 	if (GET_CODE (PATTERN (insn)) == USE
1699 	    || GET_CODE (PATTERN (insn)) == CLOBBER
1700 	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
1701 	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1702 	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1703 	  continue;
1704 
1705 	instantiate_virtual_regs_in_insn (insn);
1706 
1707 	if (INSN_DELETED_P (insn))
1708 	  continue;
1709 
1710 	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1711 
1712 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
1713 	if (GET_CODE (insn) == CALL_INSN)
1714 	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1715 			instantiate_virtual_regs_in_rtx, NULL);
1716       }
1717 
1718   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
1719   instantiate_decls (current_function_decl);
1720 
1721   /* Indicate that, from now on, assign_stack_local should use
1722      frame_pointer_rtx.  */
1723   virtuals_instantiated = 1;
1724   return 0;
1725 }
1726 
1727 struct tree_opt_pass pass_instantiate_virtual_regs =
1728 {
1729   "vregs",                              /* name */
1730   NULL,                                 /* gate */
1731   instantiate_virtual_regs,             /* execute */
1732   NULL,                                 /* sub */
1733   NULL,                                 /* next */
1734   0,                                    /* static_pass_number */
1735   0,                                    /* tv_id */
1736   0,                                    /* properties_required */
1737   0,                                    /* properties_provided */
1738   0,                                    /* properties_destroyed */
1739   0,                                    /* todo_flags_start */
1740   TODO_dump_func,                       /* todo_flags_finish */
1741   0                                     /* letter */
1742 };
1743 
1744 
1745 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1746    This means a type for which function calls must pass an address to the
1747    function or get an address back from the function.
1748    EXP may be a type node or an expression (whose type is tested).  */
1749 
1750 int
1751 aggregate_value_p (tree exp, tree fntype)
1752 {
1753   int i, regno, nregs;
1754   rtx reg;
1755 
1756   tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1757 
1758   /* DECL node associated with FNTYPE when relevant, which we might need to
1759      check for by-invisible-reference returns, typically for CALL_EXPR input
1760      EXPressions.  */
1761   tree fndecl = NULL_TREE;
1762 
1763   if (fntype)
1764     switch (TREE_CODE (fntype))
1765       {
1766       case CALL_EXPR:
1767 	fndecl = get_callee_fndecl (fntype);
1768 	fntype = fndecl ? TREE_TYPE (fndecl) : 0;
1769 	break;
1770       case FUNCTION_DECL:
1771 	fndecl = fntype;
1772 	fntype = TREE_TYPE (fndecl);
1773 	break;
1774       case FUNCTION_TYPE:
1775       case METHOD_TYPE:
1776         break;
1777       case IDENTIFIER_NODE:
1778 	fntype = 0;
1779 	break;
1780       default:
1781 	/* We don't expect other tree codes here.  */
1782 	gcc_unreachable ();
1783       }
1784 
1785   if (TREE_CODE (type) == VOID_TYPE)
1786     return 0;
1787 
1788   /* If the front end has decided that this needs to be passed by
1789      reference, do so.  */
1790   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1791       && DECL_BY_REFERENCE (exp))
1792     return 1;
1793 
1794   /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1795      called function RESULT_DECL, meaning the function returns in memory by
1796      invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
1797      on the function type, which used to be the way to request such a return
1798      mechanism but might now be causing troubles at gimplification time if
1799      temporaries with the function type need to be created.  */
1800   if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1801       && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1802     return 1;
1803 
1804   if (targetm.calls.return_in_memory (type, fntype))
1805     return 1;
1806   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1807      and thus can't be returned in registers.  */
1808   if (TREE_ADDRESSABLE (type))
1809     return 1;
1810   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1811     return 1;
1812   /* Make sure we have suitable call-clobbered regs to return
1813      the value in; if not, we must return it in memory.  */
1814   reg = hard_function_value (type, 0, fntype, 0);
1815 
1816   /* If we have something other than a REG (e.g. a PARALLEL), then assume
1817      it is OK.  */
1818   if (!REG_P (reg))
1819     return 0;
1820 
1821   regno = REGNO (reg);
1822   nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1823   for (i = 0; i < nregs; i++)
1824     if (! call_used_regs[regno + i])
1825       return 1;
1826   return 0;
1827 }
1828 
1829 /* Return true if we should assign DECL a pseudo register; false if it
1830    should live on the local stack.  */
1831 
1832 bool
1833 use_register_for_decl (tree decl)
1834 {
1835   /* Honor volatile.  */
1836   if (TREE_SIDE_EFFECTS (decl))
1837     return false;
1838 
1839   /* Honor addressability.  */
1840   if (TREE_ADDRESSABLE (decl))
1841     return false;
1842 
1843   /* Only register-like things go in registers.  */
1844   if (DECL_MODE (decl) == BLKmode)
1845     return false;
1846 
1847   /* If -ffloat-store specified, don't put explicit float variables
1848      into registers.  */
1849   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1850      propagates values across these stores, and it probably shouldn't.  */
1851   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1852     return false;
1853 
1854   /* If we're not interested in tracking debugging information for
1855      this decl, then we can certainly put it in a register.  */
1856   if (DECL_IGNORED_P (decl))
1857     return true;
1858 
1859   return (optimize || DECL_REGISTER (decl));
1860 }
1861 
1862 /* Return true if TYPE should be passed by invisible reference.  */
1863 
1864 bool
1865 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1866 		   tree type, bool named_arg)
1867 {
1868   if (type)
1869     {
1870       /* If this type contains non-trivial constructors, then it is
1871 	 forbidden for the middle-end to create any new copies.  */
1872       if (TREE_ADDRESSABLE (type))
1873 	return true;
1874 
1875       /* GCC post 3.4 passes *all* variable sized types by reference.  */
1876       if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1877 	return true;
1878     }
1879 
1880   return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1881 }
1882 
1883 /* Return true if TYPE, which is passed by reference, should be callee
1884    copied instead of caller copied.  */
1885 
1886 bool
1887 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1888 			 tree type, bool named_arg)
1889 {
1890   if (type && TREE_ADDRESSABLE (type))
1891     return false;
1892   return targetm.calls.callee_copies (ca, mode, type, named_arg);
1893 }
1894 
1895 /* Structures to communicate between the subroutines of assign_parms.
1896    The first holds data persistent across all parameters, the second
1897    is cleared out for each parameter.  */
1898 
1899 struct assign_parm_data_all
1900 {
1901   CUMULATIVE_ARGS args_so_far;
1902   struct args_size stack_args_size;
1903   tree function_result_decl;
1904   tree orig_fnargs;
1905   rtx conversion_insns;
1906   HOST_WIDE_INT pretend_args_size;
1907   HOST_WIDE_INT extra_pretend_bytes;
1908   int reg_parm_stack_space;
1909 };
1910 
1911 struct assign_parm_data_one
1912 {
1913   tree nominal_type;
1914   tree passed_type;
1915   rtx entry_parm;
1916   rtx stack_parm;
1917   enum machine_mode nominal_mode;
1918   enum machine_mode passed_mode;
1919   enum machine_mode promoted_mode;
1920   struct locate_and_pad_arg_data locate;
1921   int partial;
1922   BOOL_BITFIELD named_arg : 1;
1923   BOOL_BITFIELD passed_pointer : 1;
1924   BOOL_BITFIELD on_stack : 1;
1925   BOOL_BITFIELD loaded_in_reg : 1;
1926 };
1927 
1928 /* A subroutine of assign_parms.  Initialize ALL.  */
1929 
1930 static void
1931 assign_parms_initialize_all (struct assign_parm_data_all *all)
1932 {
1933   tree fntype;
1934 
1935   memset (all, 0, sizeof (*all));
1936 
1937   fntype = TREE_TYPE (current_function_decl);
1938 
1939 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1940   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1941 #else
1942   INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1943 			current_function_decl, -1);
1944 #endif
1945 
1946 #ifdef REG_PARM_STACK_SPACE
1947   all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1948 #endif
1949 }
1950 
1951 /* If ARGS contains entries with complex types, split the entry into two
1952    entries of the component type.  Return a new list if substitutions
1953    are needed, else the old list.  */
1954 
1955 static tree
1956 split_complex_args (tree args)
1957 {
1958   tree p;
1959 
1960   /* Before allocating memory, check for the common case of no complex.  */
1961   for (p = args; p; p = TREE_CHAIN (p))
1962     {
1963       tree type = TREE_TYPE (p);
1964       if (TREE_CODE (type) == COMPLEX_TYPE
1965 	  && targetm.calls.split_complex_arg (type))
1966         goto found;
1967     }
1968   return args;
1969 
1970  found:
1971   args = copy_list (args);
1972 
1973   for (p = args; p; p = TREE_CHAIN (p))
1974     {
1975       tree type = TREE_TYPE (p);
1976       if (TREE_CODE (type) == COMPLEX_TYPE
1977 	  && targetm.calls.split_complex_arg (type))
1978 	{
1979 	  tree decl;
1980 	  tree subtype = TREE_TYPE (type);
1981 	  bool addressable = TREE_ADDRESSABLE (p);
1982 
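	  /* For illustration: a _Complex double parameter is rewritten here
	     into two double PARM_DECLs, with P itself becoming the real
	     part and a new chained decl carrying the imaginary part.  */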
1983 	  /* Rewrite the PARM_DECL's type with its component.  */
1984 	  TREE_TYPE (p) = subtype;
1985 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1986 	  DECL_MODE (p) = VOIDmode;
1987 	  DECL_SIZE (p) = NULL;
1988 	  DECL_SIZE_UNIT (p) = NULL;
1989 	  /* If this arg must go in memory, put it in a pseudo here.
1990 	     We can't allow it to go in memory as per normal parms,
1991 	     because the usual place might not have the imag part
1992 	     adjacent to the real part.  */
1993 	  DECL_ARTIFICIAL (p) = addressable;
1994 	  DECL_IGNORED_P (p) = addressable;
1995 	  TREE_ADDRESSABLE (p) = 0;
1996 	  layout_decl (p, 0);
1997 
1998 	  /* Build a second synthetic decl.  */
1999 	  decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2000 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2001 	  DECL_ARTIFICIAL (decl) = addressable;
2002 	  DECL_IGNORED_P (decl) = addressable;
2003 	  layout_decl (decl, 0);
2004 
2005 	  /* Splice it in; skip the new decl.  */
2006 	  TREE_CHAIN (decl) = TREE_CHAIN (p);
2007 	  TREE_CHAIN (p) = decl;
2008 	  p = decl;
2009 	}
2010     }
2011 
2012   return args;
2013 }
2014 
2015 /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
2016    the hidden struct return argument, and (abi willing) complex args.
2017    Return the new parameter list.  */
2018 
2019 static tree
2020 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2021 {
2022   tree fndecl = current_function_decl;
2023   tree fntype = TREE_TYPE (fndecl);
2024   tree fnargs = DECL_ARGUMENTS (fndecl);
2025 
2026   /* If struct value address is treated as the first argument, make it so.  */
2027   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2028       && ! current_function_returns_pcc_struct
2029       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2030     {
2031       tree type = build_pointer_type (TREE_TYPE (fntype));
2032       tree decl;
2033 
2034       decl = build_decl (PARM_DECL, NULL_TREE, type);
2035       DECL_ARG_TYPE (decl) = type;
2036       DECL_ARTIFICIAL (decl) = 1;
2037       DECL_IGNORED_P (decl) = 1;
2038 
2039       TREE_CHAIN (decl) = fnargs;
2040       fnargs = decl;
2041       all->function_result_decl = decl;
2042     }
2043 
2044   all->orig_fnargs = fnargs;
2045 
2046   /* If the target wants to split complex arguments into scalars, do so.  */
2047   if (targetm.calls.split_complex_arg)
2048     fnargs = split_complex_args (fnargs);
2049 
2050   return fnargs;
2051 }
2052 
2053 /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2054    data for the parameter.  Incorporate ABI specifics such as pass-by-
2055    reference and type promotion.  */
2056 
2057 static void
2058 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2059 			     struct assign_parm_data_one *data)
2060 {
2061   tree nominal_type, passed_type;
2062   enum machine_mode nominal_mode, passed_mode, promoted_mode;
2063 
2064   memset (data, 0, sizeof (*data));
2065 
2066   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
2067   if (!current_function_stdarg)
2068     data->named_arg = 1;  /* No variadic parms.  */
2069   else if (TREE_CHAIN (parm))
2070     data->named_arg = 1;  /* Not the last non-variadic parm.  */
2071   else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2072     data->named_arg = 1;  /* Only variadic ones are unnamed.  */
2073   else
2074     data->named_arg = 0;  /* Treat as variadic.  */
2075 
2076   nominal_type = TREE_TYPE (parm);
2077   passed_type = DECL_ARG_TYPE (parm);
2078 
2079   /* Look out for errors propagating this far.  Also, if the parameter's
2080      type is void then its value doesn't matter.  */
2081   if (TREE_TYPE (parm) == error_mark_node
2082       /* This can happen after weird syntax errors
2083 	 or if an enum type is defined among the parms.  */
2084       || TREE_CODE (parm) != PARM_DECL
2085       || passed_type == NULL
2086       || VOID_TYPE_P (nominal_type))
2087     {
2088       nominal_type = passed_type = void_type_node;
2089       nominal_mode = passed_mode = promoted_mode = VOIDmode;
2090       goto egress;
2091     }
2092 
2093   /* Find mode of arg as it is passed, and mode of arg as it should be
2094      during execution of this function.  */
2095   passed_mode = TYPE_MODE (passed_type);
2096   nominal_mode = TYPE_MODE (nominal_type);
2097 
2098   /* If the parm is to be passed as a transparent union, use the type of
2099      the first field for the tests below.  We have already verified that
2100      the modes are the same.  */
2101   if (TREE_CODE (passed_type) == UNION_TYPE
2102       && TYPE_TRANSPARENT_UNION (passed_type))
2103     passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2104 
2105   /* See if this arg was passed by invisible reference.  */
2106   if (pass_by_reference (&all->args_so_far, passed_mode,
2107 			 passed_type, data->named_arg))
2108     {
2109       passed_type = nominal_type = build_pointer_type (passed_type);
2110       data->passed_pointer = true;
2111       passed_mode = nominal_mode = Pmode;
2112     }
2113 
2114   /* Find mode as it is passed by the ABI.  */
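  /* As an illustration only: on targets that promote small integer
     arguments, a 'short' parameter is widened to word mode here while
     NOMINAL_MODE keeps the narrow mode; the exact promotion is defined
     by the target's promotion rules.  */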
2115   promoted_mode = passed_mode;
2116   if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2117     {
2118       int unsignedp = TYPE_UNSIGNED (passed_type);
2119       promoted_mode = promote_mode (passed_type, promoted_mode,
2120 				    &unsignedp, 1);
2121     }
2122 
2123  egress:
2124   data->nominal_type = nominal_type;
2125   data->passed_type = passed_type;
2126   data->nominal_mode = nominal_mode;
2127   data->passed_mode = passed_mode;
2128   data->promoted_mode = promoted_mode;
2129 }
2130 
2131 /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2132 
2133 static void
2134 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2135 			    struct assign_parm_data_one *data, bool no_rtl)
2136 {
2137   int varargs_pretend_bytes = 0;
2138 
2139   targetm.calls.setup_incoming_varargs (&all->args_so_far,
2140 					data->promoted_mode,
2141 					data->passed_type,
2142 					&varargs_pretend_bytes, no_rtl);
2143 
2144   /* If the back-end has requested extra stack space, record how much is
2145      needed.  Do not change pretend_args_size otherwise since it may be
2146      nonzero from an earlier partial argument.  */
2147   if (varargs_pretend_bytes > 0)
2148     all->pretend_args_size = varargs_pretend_bytes;
2149 }
2150 
2151 /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2152    the incoming location of the current parameter.  */
2153 
2154 static void
2155 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2156 			    struct assign_parm_data_one *data)
2157 {
2158   HOST_WIDE_INT pretend_bytes = 0;
2159   rtx entry_parm;
2160   bool in_regs;
2161 
2162   if (data->promoted_mode == VOIDmode)
2163     {
2164       data->entry_parm = data->stack_parm = const0_rtx;
2165       return;
2166     }
2167 
2168 #ifdef FUNCTION_INCOMING_ARG
2169   entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2170 				      data->passed_type, data->named_arg);
2171 #else
2172   entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2173 			     data->passed_type, data->named_arg);
2174 #endif
2175 
2176   if (entry_parm == 0)
2177     data->promoted_mode = data->passed_mode;
2178 
2179   /* Determine parm's home in the stack, in case it arrives in the stack
2180      or we should pretend it did.  Compute the stack position and rtx where
2181      the argument arrives and its size.
2182 
2183      There is one complexity here:  If this was a parameter that would
2184      have been passed in registers, but wasn't only because it is
2185      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2186      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2187      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2188      as it was the previous time.  */
2189   in_regs = entry_parm != 0;
2190 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2191   in_regs = true;
2192 #endif
2193   if (!in_regs && !data->named_arg)
2194     {
2195       if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2196 	{
2197 	  rtx tem;
2198 #ifdef FUNCTION_INCOMING_ARG
2199 	  tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2200 				       data->passed_type, true);
2201 #else
2202 	  tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2203 			      data->passed_type, true);
2204 #endif
2205 	  in_regs = tem != NULL;
2206 	}
2207     }
2208 
2209   /* If this parameter was passed both in registers and in the stack, use
2210      the copy on the stack.  */
2211   if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2212 					data->passed_type))
2213     entry_parm = 0;
2214 
2215   if (entry_parm)
2216     {
2217       int partial;
2218 
2219       partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2220 						 data->promoted_mode,
2221 						 data->passed_type,
2222 						 data->named_arg);
2223       data->partial = partial;
2224 
2225       /* The caller might already have allocated stack space for the
2226 	 register parameters.  */
2227       if (partial != 0 && all->reg_parm_stack_space == 0)
2228 	{
2229 	  /* Part of this argument is passed in registers and part
2230 	     is passed on the stack.  Ask the prologue code to extend
2231 	     the stack part so that we can recreate the full value.
2232 
2233 	     PRETEND_BYTES is the size of the registers we need to store.
2234 	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2235 	     stack space that the prologue should allocate.
2236 
2237 	     Internally, gcc assumes that the argument pointer is aligned
2238 	     to STACK_BOUNDARY bits.  This is used both for alignment
2239 	     optimizations (see init_emit) and to locate arguments that are
2240 	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
2241 	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2242 	     a stack boundary.  */
2243 
2244 	  /* We assume at most one partial arg, and it must be the first
2245 	     argument on the stack.  */
2246 	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2247 
2248 	  pretend_bytes = partial;
2249 	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
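	  /* Illustrative numbers only: with STACK_BYTES == 16 and a 20-byte
	     register part, PRETEND_ARGS_SIZE becomes 32, preserving the
	     STACK_BOUNDARY alignment of the argument pointer.  */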
2250 
2251 	  /* We want to align relative to the actual stack pointer, so
2252 	     don't include this in the stack size until later.  */
2253 	  all->extra_pretend_bytes = all->pretend_args_size;
2254 	}
2255     }
2256 
2257   locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2258 		       entry_parm ? data->partial : 0, current_function_decl,
2259 		       &all->stack_args_size, &data->locate);
2260 
2261   /* Adjust offsets to include the pretend args.  */
2262   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2263   data->locate.slot_offset.constant += pretend_bytes;
2264   data->locate.offset.constant += pretend_bytes;
2265 
2266   data->entry_parm = entry_parm;
2267 }
2268 
2269 /* A subroutine of assign_parms.  If there is actually space on the stack
2270    for this parm, count it in stack_args_size and return true.  */
2271 
2272 static bool
2273 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2274 			   struct assign_parm_data_one *data)
2275 {
2276   /* Trivially true if we've no incoming register.  */
2277   if (data->entry_parm == NULL)
2278     ;
2279   /* Also true if we're partially in registers and partially not,
2280      since we've arranged to drop the entire argument on the stack.  */
2281   else if (data->partial != 0)
2282     ;
2283   /* Also true if the target says that it's passed in both registers
2284      and on the stack.  */
2285   else if (GET_CODE (data->entry_parm) == PARALLEL
2286 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2287     ;
2288   /* Also true if the target says that there's stack allocated for
2289      all register parameters.  */
2290   else if (all->reg_parm_stack_space > 0)
2291     ;
2292   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2293   else
2294     return false;
2295 
2296   all->stack_args_size.constant += data->locate.size.constant;
2297   if (data->locate.size.var)
2298     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2299 
2300   return true;
2301 }
2302 
2303 /* A subroutine of assign_parms.  Given that this parameter is allocated
2304    stack space by the ABI, find it.  */
2305 
2306 static void
2307 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2308 {
2309   rtx offset_rtx, stack_parm;
2310   unsigned int align, boundary;
2311 
2312   /* If we're passing this arg using a reg, make its stack home the
2313      aligned stack slot.  */
2314   if (data->entry_parm)
2315     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2316   else
2317     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2318 
2319   stack_parm = current_function_internal_arg_pointer;
2320   if (offset_rtx != const0_rtx)
2321     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2322   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2323 
2324   set_mem_attributes (stack_parm, parm, 1);
2325 
2326   boundary = data->locate.boundary;
2327   align = BITS_PER_UNIT;
2328 
2329   /* If we're padding upward, we know that the alignment of the slot
2330      is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2331      intentionally forcing upward padding.  Otherwise we have to come
2332      up with a guess at the alignment based on OFFSET_RTX.  */
2333   if (data->locate.where_pad != downward || data->entry_parm)
2334     align = boundary;
2335   else if (GET_CODE (offset_rtx) == CONST_INT)
2336     {
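      /* OR-ing the bit offset with the boundary and then taking
	 ALIGN & -ALIGN isolates the lowest set bit, i.e. the largest
	 power of two dividing both.  For example, a 12-byte offset with
	 a 64-bit boundary gives a 32-bit alignment guess.  */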
2337       align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2338       align = align & -align;
2339     }
2340   set_mem_align (stack_parm, align);
2341 
2342   if (data->entry_parm)
2343     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2344 
2345   data->stack_parm = stack_parm;
2346 }
2347 
2348 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_PARM such that it's
2349    always valid and contiguous.  */
2350 
2351 static void
2352 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2353 {
2354   rtx entry_parm = data->entry_parm;
2355   rtx stack_parm = data->stack_parm;
2356 
2357   /* If this parm was passed part in regs and part in memory, pretend it
2358      arrived entirely in memory by pushing the register-part onto the stack.
2359      In the special case of a DImode or DFmode that is split, we could put
2360      it together in a pseudoreg directly, but for now that's not worth
2361      bothering with.  */
2362   if (data->partial != 0)
2363     {
2364       /* Handle calls that pass values in multiple non-contiguous
2365 	 locations.  The Irix 6 ABI has examples of this.  */
2366       if (GET_CODE (entry_parm) == PARALLEL)
2367 	emit_group_store (validize_mem (stack_parm), entry_parm,
2368 			  data->passed_type,
2369 			  int_size_in_bytes (data->passed_type));
2370       else
2371 	{
2372 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
2373 	  move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2374 			       data->partial / UNITS_PER_WORD);
2375 	}
2376 
2377       entry_parm = stack_parm;
2378     }
2379 
2380   /* If we didn't decide this parm came in a register, by default it came
2381      on the stack.  */
2382   else if (entry_parm == NULL)
2383     entry_parm = stack_parm;
2384 
2385   /* When an argument is passed in multiple locations, we can't make use
2386      of this information, but we can save some copying if the whole argument
2387      is passed in a single register.  */
2388   else if (GET_CODE (entry_parm) == PARALLEL
2389 	   && data->nominal_mode != BLKmode
2390 	   && data->passed_mode != BLKmode)
2391     {
2392       size_t i, len = XVECLEN (entry_parm, 0);
2393 
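      /* Look for a piece that covers the whole value: a register in
	 PASSED_MODE located at offset zero within the argument.  */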
2394       for (i = 0; i < len; i++)
2395 	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2396 	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2397 	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2398 		== data->passed_mode)
2399 	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2400 	  {
2401 	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2402 	    break;
2403 	  }
2404     }
2405 
2406   data->entry_parm = entry_parm;
2407 }
2408 
2409 /* A subroutine of assign_parms.  Adjust DATA->STACK_PARM such that it's
2410    always valid and properly aligned.  */
2411 
2412 static void
2413 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2414 {
2415   rtx stack_parm = data->stack_parm;
2416 
2417   /* If we can't trust the parm stack slot to be aligned enough for its
2418      ultimate type, don't use that slot after entry.  We'll make another
2419      stack slot, if we need one.  */
2420   if (stack_parm
2421       && ((STRICT_ALIGNMENT
2422 	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2423 	  || (data->nominal_type
2424 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2425 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2426     stack_parm = NULL;
2427 
2428   /* If parm was passed in memory, and we need to convert it on entry,
2429      don't store it back in that same slot.  */
2430   else if (data->entry_parm == stack_parm
2431 	   && data->nominal_mode != BLKmode
2432 	   && data->nominal_mode != data->passed_mode)
2433     stack_parm = NULL;
2434 
2435   /* If stack protection is in effect for this function, don't leave any
2436      pointers in their passed stack slots.  */
2437   else if (cfun->stack_protect_guard
2438 	   && (flag_stack_protect == 2
2439 	       || data->passed_pointer
2440 	       || POINTER_TYPE_P (data->nominal_type)))
2441     stack_parm = NULL;
2442 
2443   data->stack_parm = stack_parm;
2444 }
2445 
2446 /* A subroutine of assign_parms.  Return true if the current parameter
2447    should be stored as a BLKmode in the current frame.  */
2448 
2449 static bool
2450 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2451 {
2452   if (data->nominal_mode == BLKmode)
2453     return true;
2454   if (GET_CODE (data->entry_parm) == PARALLEL)
2455     return true;
2456 
2457 #ifdef BLOCK_REG_PADDING
2458   /* Only assign_parm_setup_block knows how to deal with register arguments
2459      that are padded at the least significant end.  */
2460   if (REG_P (data->entry_parm)
2461       && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2462       && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2463 	  == (BYTES_BIG_ENDIAN ? upward : downward)))
2464     return true;
2465 #endif
2466 
2467   return false;
2468 }
2469 
2470 /* A subroutine of assign_parms.  Arrange for the parameter to be
2471    present and valid in DATA->STACK_RTL.  */
2472    present and valid in DATA->STACK_PARM.  */
2473 static void
2474 assign_parm_setup_block (struct assign_parm_data_all *all,
2475 			 tree parm, struct assign_parm_data_one *data)
2476 {
2477   rtx entry_parm = data->entry_parm;
2478   rtx stack_parm = data->stack_parm;
2479   HOST_WIDE_INT size;
2480   HOST_WIDE_INT size_stored;
2481   rtx orig_entry_parm = entry_parm;
2482 
2483   if (GET_CODE (entry_parm) == PARALLEL)
2484     entry_parm = emit_group_move_into_temps (entry_parm);
2485 
2486   /* If we've a non-block object that's nevertheless passed in parts,
2487      reconstitute it in register operations rather than on the stack.  */
2488   if (GET_CODE (entry_parm) == PARALLEL
2489       && data->nominal_mode != BLKmode)
2490     {
2491       rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2492 
2493       if ((XVECLEN (entry_parm, 0) > 1
2494 	   || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2495 	  && use_register_for_decl (parm))
2496 	{
2497 	  rtx parmreg = gen_reg_rtx (data->nominal_mode);
2498 
2499 	  push_to_sequence (all->conversion_insns);
2500 
2501 	  /* For values returned in multiple registers, handle possible
2502 	     incompatible calls to emit_group_store.
2503 
2504 	     For example, the following would be invalid, and would have to
2505 	     be fixed by the conditional below:
2506 
2507 	     emit_group_store ((reg:SF), (parallel:DF))
2508 	     emit_group_store ((reg:SI), (parallel:DI))
2509 
2510 	     An example of this is doubles in e500 v2:
2511 	     (parallel:DF (expr_list (reg:SI) (const_int 0))
2512 	     (expr_list (reg:SI) (const_int 4))).  */
2513 	  if (data->nominal_mode != data->passed_mode)
2514 	    {
2515 	      rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2516 	      emit_group_store (t, entry_parm, NULL_TREE,
2517 				GET_MODE_SIZE (GET_MODE (entry_parm)));
2518 	      convert_move (parmreg, t, 0);
2519 	    }
2520 	  else
2521 	    emit_group_store (parmreg, entry_parm, data->nominal_type,
2522 			      int_size_in_bytes (data->nominal_type));
2523 
2524 	  all->conversion_insns = get_insns ();
2525 	  end_sequence ();
2526 
2527 	  SET_DECL_RTL (parm, parmreg);
2528 	  return;
2529 	}
2530     }
2531 
2532   size = int_size_in_bytes (data->passed_type);
2533   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2534   if (stack_parm == 0)
2535     {
2536       DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2537       stack_parm = assign_stack_local (BLKmode, size_stored,
2538 				       DECL_ALIGN (parm));
2539       if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2540 	PUT_MODE (stack_parm, GET_MODE (entry_parm));
2541       set_mem_attributes (stack_parm, parm, 1);
2542     }
2543 
2544   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
2545      calls that pass values in multiple non-contiguous locations.  */
2546   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2547     {
2548       rtx mem;
2549 
2550       /* Note that we will be storing an integral number of words.
2551 	 So we have to be careful to ensure that we allocate an
2552 	 integral number of words.  We do this above when we call
2553 	 assign_stack_local if space was not allocated in the argument
2554 	 list.  If it was, this will not work if PARM_BOUNDARY is not
2555 	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
2556 	 if it becomes a problem.  Exception is when BLKmode arrives
2557 	 with arguments not conforming to word_mode.  */
2558 
2559       if (data->stack_parm == 0)
2560 	;
2561       else if (GET_CODE (entry_parm) == PARALLEL)
2562 	;
2563       else
2564 	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2565 
2566       mem = validize_mem (stack_parm);
2567 
2568       /* Handle values in multiple non-contiguous locations.  */
2569       if (GET_CODE (entry_parm) == PARALLEL)
2570 	{
2571 	  push_to_sequence (all->conversion_insns);
2572 	  emit_group_store (mem, entry_parm, data->passed_type, size);
2573 	  all->conversion_insns = get_insns ();
2574 	  end_sequence ();
2575 	}
2576 
2577       else if (size == 0)
2578 	;
2579 
2580       /* If SIZE is that of a mode no bigger than a word, just use
2581 	 that mode's store operation.  */
2582       else if (size <= UNITS_PER_WORD)
2583 	{
2584 	  enum machine_mode mode
2585 	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2586 
2587 	  if (mode != BLKmode
2588 #ifdef BLOCK_REG_PADDING
2589 	      && (size == UNITS_PER_WORD
2590 		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2591 		      != (BYTES_BIG_ENDIAN ? upward : downward)))
2592 #endif
2593 	      )
2594 	    {
2595 	      rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2596 	      emit_move_insn (change_address (mem, mode, 0), reg);
2597 	    }
2598 
2599 	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2600 	     machine must be aligned to the left before storing
2601 	     to memory.  Note that the previous test doesn't
2602 	     handle all cases (e.g. SIZE == 3).  */
2603 	  else if (size != UNITS_PER_WORD
2604 #ifdef BLOCK_REG_PADDING
2605 		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2606 		       == downward)
2607 #else
2608 		   && BYTES_BIG_ENDIAN
2609 #endif
2610 		   )
2611 	    {
2612 	      rtx tem, x;
2613 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
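	      /* Illustrative case: with 4-byte words and SIZE == 3, BY is 8,
		 so the shift below moves the value into the most significant
		 bytes of the word before it is stored.  */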
2614 	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2615 
2616 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2617 				build_int_cst (NULL_TREE, by),
2618 				NULL_RTX, 1);
2619 	      tem = change_address (mem, word_mode, 0);
2620 	      emit_move_insn (tem, x);
2621 	    }
2622 	  else
2623 	    move_block_from_reg (REGNO (entry_parm), mem,
2624 				 size_stored / UNITS_PER_WORD);
2625 	}
2626       else
2627 	move_block_from_reg (REGNO (entry_parm), mem,
2628 			     size_stored / UNITS_PER_WORD);
2629     }
2630   else if (data->stack_parm == 0)
2631     {
2632       push_to_sequence (all->conversion_insns);
2633       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2634 		       BLOCK_OP_NORMAL);
2635       all->conversion_insns = get_insns ();
2636       end_sequence ();
2637     }
2638 
2639   data->stack_parm = stack_parm;
2640   SET_DECL_RTL (parm, stack_parm);
2641 }
2642 
2643 /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
2644    parameter.  Get it there.  Perform all ABI specified conversions.  */
2645 
2646 static void
2647 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2648 		       struct assign_parm_data_one *data)
2649 {
2650   rtx parmreg;
2651   enum machine_mode promoted_nominal_mode;
2652   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2653   bool did_conversion = false;
2654 
2655   /* Store the parm in a pseudoregister during the function, but we may
2656      need to do it in a wider mode.  */
2657 
2658   /* This is not really promoting for a call.  However we need to be
2659      consistent with assign_parm_find_data_types and expand_expr_real_1.  */
2660   promoted_nominal_mode
2661     = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2662 
2663   parmreg = gen_reg_rtx (promoted_nominal_mode);
2664 
2665   if (!DECL_ARTIFICIAL (parm))
2666     mark_user_reg (parmreg);
2667 
2668   /* If this was an item that we received a pointer to,
2669      set DECL_RTL appropriately.  */
2670   if (data->passed_pointer)
2671     {
2672       rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2673       set_mem_attributes (x, parm, 1);
2674       SET_DECL_RTL (parm, x);
2675     }
2676   else
2677     SET_DECL_RTL (parm, parmreg);
2678 
2679   /* Copy the value into the register.  */
2680   if (data->nominal_mode != data->passed_mode
2681       || promoted_nominal_mode != data->promoted_mode)
2682     {
2683       int save_tree_used;
2684 
2685       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2686 	 mode, by the caller.  We now have to convert it to
2687 	 NOMINAL_MODE, if different.  However, PARMREG may be in
2688 	 a different mode than NOMINAL_MODE if it is being stored
2689 	 promoted.
2690 
2691 	 If ENTRY_PARM is a hard register, it might be in a register
2692 	 not valid for operating in its mode (e.g., an odd-numbered
2693 	 register for a DFmode).  In that case, moves are the only
2694 	 thing valid, so we can't do a convert from there.  This
2695 	 occurs when the calling sequence allow such misaligned
2696 	 occurs when the calling sequence allows such misaligned
2697 
2698 	 In addition, the conversion may involve a call, which could
2699 	 clobber parameters which haven't been copied to pseudo
2700 	 registers yet.  Therefore, we must first copy the parm to
2701 	 a pseudo reg here, and save the conversion until after all
2702 	 parameters have been moved.  */
2703 
2704       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2705 
2706       emit_move_insn (tempreg, validize_mem (data->entry_parm));
2707 
2708       push_to_sequence (all->conversion_insns);
2709       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2710 
2711       if (GET_CODE (tempreg) == SUBREG
2712 	  && GET_MODE (tempreg) == data->nominal_mode
2713 	  && REG_P (SUBREG_REG (tempreg))
2714 	  && data->nominal_mode == data->passed_mode
2715 	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2716 	  && GET_MODE_SIZE (GET_MODE (tempreg))
2717 	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2718 	{
2719 	  /* The argument is already sign/zero extended, so note it
2720 	     into the subreg.  */
2721 	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2722 	  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2723 	}
2724 
2725       /* TREE_USED gets set erroneously during expand_assignment.  */
2726       save_tree_used = TREE_USED (parm);
2727       expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2728       TREE_USED (parm) = save_tree_used;
2729       all->conversion_insns = get_insns ();
2730       end_sequence ();
2731 
2732       did_conversion = true;
2733     }
2734   else
2735     emit_move_insn (parmreg, validize_mem (data->entry_parm));
2736 
2737   /* If we were passed a pointer but the actual value can safely live
2738      in a register, put it in one.  */
2739   if (data->passed_pointer
2740       && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2741       /* If by-reference argument was promoted, demote it.  */
2742       && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2743 	  || use_register_for_decl (parm)))
2744     {
2745       /* We can't use nominal_mode, because it will have been set to
2746 	 Pmode above.  We must use the actual mode of the parm.  */
2747       parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2748       mark_user_reg (parmreg);
2749 
2750       if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2751 	{
2752 	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2753 	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2754 
2755 	  push_to_sequence (all->conversion_insns);
2756 	  emit_move_insn (tempreg, DECL_RTL (parm));
2757 	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2758 	  emit_move_insn (parmreg, tempreg);
2759 	  all->conversion_insns = get_insns ();
2760 	  end_sequence ();
2761 
2762 	  did_conversion = true;
2763 	}
2764       else
2765 	emit_move_insn (parmreg, DECL_RTL (parm));
2766 
2767       SET_DECL_RTL (parm, parmreg);
2768 
2769       /* STACK_PARM is the pointer, not the parm, and PARMREG is
2770 	 now the parm.  */
2771       data->stack_parm = NULL;
2772     }
2773 
2774   /* Mark the register as eliminable if we did no conversion and it was
2775      copied from memory at a fixed offset, and the arg pointer was not
2776      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
2777      offset formed an invalid address, such memory-equivalences as we
2778      make here would screw up life analysis for it.  */
2779   if (data->nominal_mode == data->passed_mode
2780       && !did_conversion
2781       && data->stack_parm != 0
2782       && MEM_P (data->stack_parm)
2783       && data->locate.offset.var == 0
2784       && reg_mentioned_p (virtual_incoming_args_rtx,
2785 			  XEXP (data->stack_parm, 0)))
2786     {
2787       rtx linsn = get_last_insn ();
2788       rtx sinsn, set;
2789 
2790       /* Mark complex types separately.  */
2791       if (GET_CODE (parmreg) == CONCAT)
2792 	{
2793 	  enum machine_mode submode
2794 	    = GET_MODE_INNER (GET_MODE (parmreg));
2795 	  int regnor = REGNO (XEXP (parmreg, 0));
2796 	  int regnoi = REGNO (XEXP (parmreg, 1));
2797 	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2798 	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
2799 					  GET_MODE_SIZE (submode));
2800 
2801 	  /* Scan backwards for the set of the real and
2802 	     imaginary parts.  */
2803 	  for (sinsn = linsn; sinsn != 0;
2804 	       sinsn = prev_nonnote_insn (sinsn))
2805 	    {
2806 	      set = single_set (sinsn);
2807 	      if (set == 0)
2808 		continue;
2809 
2810 	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
2811 		REG_NOTES (sinsn)
2812 		  = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2813 				       REG_NOTES (sinsn));
2814 	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
2815 		REG_NOTES (sinsn)
2816 		  = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2817 				       REG_NOTES (sinsn));
2818 	    }
2819 	}
2820       else if ((set = single_set (linsn)) != 0
2821 	       && SET_DEST (set) == parmreg)
2822 	REG_NOTES (linsn)
2823 	  = gen_rtx_EXPR_LIST (REG_EQUIV,
2824 			       data->stack_parm, REG_NOTES (linsn));
2825     }
2826 
2827   /* For pointer data type, suggest pointer register.  */
2828   if (POINTER_TYPE_P (TREE_TYPE (parm)))
2829     mark_reg_pointer (parmreg,
2830 		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2831 }
2832 
2833 /* A subroutine of assign_parms.  Allocate stack space to hold the current
2834    parameter.  Get it there.  Perform all ABI specified conversions.  */
2835 
2836 static void
2837 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2838 		         struct assign_parm_data_one *data)
2839 {
2840   /* Value must be stored in the stack slot STACK_PARM during function
2841      execution.  */
2842   bool to_conversion = false;
2843 
2844   if (data->promoted_mode != data->nominal_mode)
2845     {
2846       /* Conversion is required.  */
2847       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2848 
2849       emit_move_insn (tempreg, validize_mem (data->entry_parm));
2850 
2851       push_to_sequence (all->conversion_insns);
2852       to_conversion = true;
2853 
2854       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2855 					  TYPE_UNSIGNED (TREE_TYPE (parm)));
2856 
2857       if (data->stack_parm)
2858 	/* ??? This may need a big-endian conversion on sparc64.  */
2859 	data->stack_parm
2860 	  = adjust_address (data->stack_parm, data->nominal_mode, 0);
2861     }
2862 
2863   if (data->entry_parm != data->stack_parm)
2864     {
2865       rtx src, dest;
2866 
2867       if (data->stack_parm == 0)
2868 	{
2869 	  data->stack_parm
2870 	    = assign_stack_local (GET_MODE (data->entry_parm),
2871 				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2872 				  TYPE_ALIGN (data->passed_type));
2873 	  set_mem_attributes (data->stack_parm, parm, 1);
2874 	}
2875 
2876       dest = validize_mem (data->stack_parm);
2877       src = validize_mem (data->entry_parm);
2878 
2879       if (MEM_P (src))
2880 	{
2881 	  /* Use a block move to handle potentially misaligned entry_parm.  */
2882 	  if (!to_conversion)
2883 	    push_to_sequence (all->conversion_insns);
2884 	  to_conversion = true;
2885 
2886 	  emit_block_move (dest, src,
2887 			   GEN_INT (int_size_in_bytes (data->passed_type)),
2888 			   BLOCK_OP_NORMAL);
2889 	}
2890       else
2891 	emit_move_insn (dest, src);
2892     }
2893 
2894   if (to_conversion)
2895     {
2896       all->conversion_insns = get_insns ();
2897       end_sequence ();
2898     }
2899 
2900   SET_DECL_RTL (parm, data->stack_parm);
2901 }
2902 
2903 /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
2904    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
2905 
2906 static void
2907 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2908 {
2909   tree parm;
2910   tree orig_fnargs = all->orig_fnargs;
2911 
2912   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2913     {
2914       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2915 	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2916 	{
2917 	  rtx tmp, real, imag;
2918 	  enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2919 
2920 	  real = DECL_RTL (fnargs);
2921 	  imag = DECL_RTL (TREE_CHAIN (fnargs));
2922 	  if (inner != GET_MODE (real))
2923 	    {
2924 	      real = gen_lowpart_SUBREG (inner, real);
2925 	      imag = gen_lowpart_SUBREG (inner, imag);
2926 	    }
2927 
2928 	  if (TREE_ADDRESSABLE (parm))
2929 	    {
2930 	      rtx rmem, imem;
2931 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2932 
2933 	      /* split_complex_arg put the real and imag parts in
2934 		 pseudos.  Move them to memory.  */
2935 	      tmp = assign_stack_local (DECL_MODE (parm), size,
2936 					TYPE_ALIGN (TREE_TYPE (parm)));
2937 	      set_mem_attributes (tmp, parm, 1);
2938 	      rmem = adjust_address_nv (tmp, inner, 0);
2939 	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2940 	      push_to_sequence (all->conversion_insns);
2941 	      emit_move_insn (rmem, real);
2942 	      emit_move_insn (imem, imag);
2943 	      all->conversion_insns = get_insns ();
2944 	      end_sequence ();
2945 	    }
2946 	  else
2947 	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2948 	  SET_DECL_RTL (parm, tmp);
2949 
2950 	  real = DECL_INCOMING_RTL (fnargs);
2951 	  imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2952 	  if (inner != GET_MODE (real))
2953 	    {
2954 	      real = gen_lowpart_SUBREG (inner, real);
2955 	      imag = gen_lowpart_SUBREG (inner, imag);
2956 	    }
2957 	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2958 	  set_decl_incoming_rtl (parm, tmp);
2959 	  fnargs = TREE_CHAIN (fnargs);
2960 	}
2961       else
2962 	{
2963 	  SET_DECL_RTL (parm, DECL_RTL (fnargs));
2964 	  set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2965 
2966 	  /* Set MEM_EXPR to the original decl, i.e. to PARM,
2967 	     instead of the copy of decl, i.e. FNARGS.  */
2968 	  if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2969 	    set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2970 	}
2971 
2972       fnargs = TREE_CHAIN (fnargs);
2973     }
2974 }
2975 
2976 /* Assign RTL expressions to the function's parameters.  This may involve
2977    copying them into registers and using those registers as the DECL_RTL.  */
2978 
2979 static void
2980 assign_parms (tree fndecl)
2981 {
2982   struct assign_parm_data_all all;
2983   tree fnargs, parm;
2984 
2985   current_function_internal_arg_pointer
2986     = targetm.calls.internal_arg_pointer ();
2987 
2988   assign_parms_initialize_all (&all);
2989   fnargs = assign_parms_augmented_arg_list (&all);
2990 
2991   for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2992     {
2993       struct assign_parm_data_one data;
2994 
2995       /* Extract the type of PARM; adjust it according to ABI.  */
2996       assign_parm_find_data_types (&all, parm, &data);
2997 
2998       /* Early out for errors and void parameters.  */
2999       if (data.passed_mode == VOIDmode)
3000 	{
3001 	  SET_DECL_RTL (parm, const0_rtx);
3002 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3003 	  continue;
3004 	}
3005 
3006       if (current_function_stdarg && !TREE_CHAIN (parm))
3007 	assign_parms_setup_varargs (&all, &data, false);
3008 
3009       /* Find out where the parameter arrives in this function.  */
3010       assign_parm_find_entry_rtl (&all, &data);
3011 
3012       /* Find out where stack space for this parameter might be.  */
3013       if (assign_parm_is_stack_parm (&all, &data))
3014 	{
3015 	  assign_parm_find_stack_rtl (parm, &data);
3016 	  assign_parm_adjust_entry_rtl (&data);
3017 	}
3018 
3019       /* Record permanently how this parm was passed.  */
3020       set_decl_incoming_rtl (parm, data.entry_parm);
3021 
3022       /* Update info on where next arg arrives in registers.  */
3023       FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3024 			    data.passed_type, data.named_arg);
3025 
3026       assign_parm_adjust_stack_rtl (&data);
3027 
3028       if (assign_parm_setup_block_p (&data))
3029 	assign_parm_setup_block (&all, parm, &data);
3030       else if (data.passed_pointer || use_register_for_decl (parm))
3031 	assign_parm_setup_reg (&all, parm, &data);
3032       else
3033 	assign_parm_setup_stack (&all, parm, &data);
3034     }
3035 
3036   if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3037     assign_parms_unsplit_complex (&all, fnargs);
3038 
3039   /* Output all parameter conversion instructions (possibly including calls)
3040      now that all parameters have been copied out of hard registers.  */
3041   emit_insn (all.conversion_insns);
3042 
3043   /* If we are receiving a struct value address as the first argument, set up
3044      the RTL for the function result. As this might require code to convert
3045      the transmitted address to Pmode, we do this here to ensure that possible
3046      preliminary conversions of the address have been emitted already.  */
3047   if (all.function_result_decl)
3048     {
3049       tree result = DECL_RESULT (current_function_decl);
3050       rtx addr = DECL_RTL (all.function_result_decl);
3051       rtx x;
3052 
3053       if (DECL_BY_REFERENCE (result))
3054 	x = addr;
3055       else
3056 	{
3057 	  addr = convert_memory_address (Pmode, addr);
3058 	  x = gen_rtx_MEM (DECL_MODE (result), addr);
3059 	  set_mem_attributes (x, result, 1);
3060 	}
3061       SET_DECL_RTL (result, x);
3062     }
3063 
3064   /* We have aligned all the args, so add space for the pretend args.  */
3065   current_function_pretend_args_size = all.pretend_args_size;
3066   all.stack_args_size.constant += all.extra_pretend_bytes;
3067   current_function_args_size = all.stack_args_size.constant;
3068 
3069   /* Adjust function incoming argument size for alignment and
3070      minimum length.  */
3071 
3072 #ifdef REG_PARM_STACK_SPACE
3073   current_function_args_size = MAX (current_function_args_size,
3074 				    REG_PARM_STACK_SPACE (fndecl));
3075 #endif
3076 
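  /* For instance, with a 64-bit PARM_BOUNDARY, 20 bytes of incoming
     arguments round up to 24 (example figures only; both values are
     target-specific).  */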
3077   current_function_args_size = CEIL_ROUND (current_function_args_size,
3078 					   PARM_BOUNDARY / BITS_PER_UNIT);
3079 
3080 #ifdef ARGS_GROW_DOWNWARD
3081   current_function_arg_offset_rtx
3082     = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3083        : expand_expr (size_diffop (all.stack_args_size.var,
3084 				   size_int (-all.stack_args_size.constant)),
3085 		      NULL_RTX, VOIDmode, 0));
3086 #else
3087   current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3088 #endif
3089 
3090   /* See how many bytes, if any, of its args a function should try to pop
3091      on return.  */
3092 
3093   current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3094 						 current_function_args_size);
3095 
3096   /* For stdarg.h function, save info about
3097      regs and stack space used by the named args.  */
3098 
3099   current_function_args_info = all.args_so_far;
3100 
3101   /* Set the rtx used for the function return value.  Put this in its
3102      own variable so any optimizers that need this information don't have
3103      to include tree.h.  Do this here so it gets done when an inlined
3104      function gets output.  */
3105 
3106   current_function_return_rtx
3107     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3108        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3109 
3110   /* If scalar return value was computed in a pseudo-reg, or was a named
3111      return value that got dumped to the stack, copy that to the hard
3112      return register.  */
3113   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3114     {
3115       tree decl_result = DECL_RESULT (fndecl);
3116       rtx decl_rtl = DECL_RTL (decl_result);
3117 
3118       if (REG_P (decl_rtl)
3119 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3120 	  : DECL_REGISTER (decl_result))
3121 	{
3122 	  rtx real_decl_rtl;
3123 
3124 	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3125 							fndecl, true);
3126 	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3127 	  /* The delay slot scheduler assumes that current_function_return_rtx
3128 	     holds the hard register containing the return value, not a
3129 	     temporary pseudo.  */
3130 	  current_function_return_rtx = real_decl_rtl;
3131 	}
3132     }
3133 }
3134 
3135 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3136    For all seen types, gimplify their sizes.  */
3137 
3138 static tree
3139 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3140 {
3141   tree t = *tp;
3142 
3143   *walk_subtrees = 0;
3144   if (TYPE_P (t))
3145     {
3146       if (POINTER_TYPE_P (t))
3147 	*walk_subtrees = 1;
3148       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3149 	       && !TYPE_SIZES_GIMPLIFIED (t))
3150 	{
3151 	  gimplify_type_sizes (t, (tree *) data);
3152 	  *walk_subtrees = 1;
3153 	}
3154     }
3155 
3156   return NULL;
3157 }
3158 
3159 /* Gimplify the parameter list for current_function_decl.  This involves
3160    evaluating SAVE_EXPRs of variable sized parameters and generating code
3161    to implement callee-copies reference parameters.  Returns a list of
3162    statements to add to the beginning of the function, or NULL if nothing
3163    to do.  */
3164 
3165 tree
3166 gimplify_parameters (void)
3167 {
3168   struct assign_parm_data_all all;
3169   tree fnargs, parm, stmts = NULL;
3170 
3171   assign_parms_initialize_all (&all);
3172   fnargs = assign_parms_augmented_arg_list (&all);
3173 
3174   for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3175     {
3176       struct assign_parm_data_one data;
3177 
3178       /* Extract the type of PARM; adjust it according to ABI.  */
3179       assign_parm_find_data_types (&all, parm, &data);
3180 
3181       /* Early out for errors and void parameters.  */
3182       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3183 	continue;
3184 
3185       /* Update info on where next arg arrives in registers.  */
3186       FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3187 			    data.passed_type, data.named_arg);
3188 
3189       /* ??? Once upon a time variable_size stuffed parameter list
3190 	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
3191 	 turned out to be less than manageable in the gimple world.
3192 	 Now we have to hunt them down ourselves.  */
3193       walk_tree_without_duplicates (&data.passed_type,
3194 				    gimplify_parm_type, &stmts);
3195 
3196       if (!TREE_CONSTANT (DECL_SIZE (parm)))
3197 	{
3198 	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3199 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3200 	}
3201 
3202       if (data.passed_pointer)
3203 	{
3204           tree type = TREE_TYPE (data.passed_type);
3205 	  if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3206 				       type, data.named_arg))
3207 	    {
3208 	      tree local, t;
3209 
3210 	      /* For constant sized objects, this is trivial; for
3211 		 variable-sized objects, we have to play games.  */
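	      /* The "games": grab the space with __builtin_alloca at run
		 time, copy the incoming argument into it, and make PARM's
		 DECL_VALUE_EXPR dereference the saved pointer.  */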
3212 	      if (TREE_CONSTANT (DECL_SIZE (parm)))
3213 		{
3214 		  local = create_tmp_var (type, get_name (parm));
3215 		  DECL_IGNORED_P (local) = 0;
3216 		}
3217 	      else
3218 		{
3219 		  tree ptr_type, addr, args;
3220 
3221 		  ptr_type = build_pointer_type (type);
3222 		  addr = create_tmp_var (ptr_type, get_name (parm));
3223 		  DECL_IGNORED_P (addr) = 0;
3224 		  local = build_fold_indirect_ref (addr);
3225 
3226 		  args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3227 		  t = built_in_decls[BUILT_IN_ALLOCA];
3228 		  t = build_function_call_expr (t, args);
3229 		  t = fold_convert (ptr_type, t);
3230 		  t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3231 		  gimplify_and_add (t, &stmts);
3232 		}
3233 
3234 	      t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3235 	      gimplify_and_add (t, &stmts);
3236 
3237 	      SET_DECL_VALUE_EXPR (parm, local);
3238 	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
3239 	    }
3240 	}
3241     }
3242 
3243   return stmts;
3244 }
3245 
3246 /* Indicate whether REGNO is an incoming argument to the current function
3247    that was promoted to a wider mode.  If so, return the RTX for the
3248    register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
3249    that REGNO is promoted from and whether the promotion was signed or
3250    unsigned.  */
3251 
3252 rtx
3253 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3254 {
3255   tree arg;
3256 
3257   for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3258        arg = TREE_CHAIN (arg))
3259     if (REG_P (DECL_INCOMING_RTL (arg))
3260 	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
3261 	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3262       {
3263 	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3264 	int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3265 
3266 	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3267 	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3268 	    && mode != DECL_MODE (arg))
3269 	  {
3270 	    *pmode = DECL_MODE (arg);
3271 	    *punsignedp = unsignedp;
3272 	    return DECL_INCOMING_RTL (arg);
3273 	  }
3274       }
3275 
3276   return 0;
3277 }
3278 
3279 
3280 /* Compute the size and offset from the start of the stacked arguments for a
3281    parm passed in mode PASSED_MODE and with type TYPE.
3282 
3283    INITIAL_OFFSET_PTR points to the current offset into the stacked
3284    arguments.
3285 
3286    The starting offset and size for this parm are returned in
3287    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
3288    nonzero, the offset is that of the stack slot, which is returned in
3289    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
3290    padding required from the initial offset ptr to the stack slot.
3291 
3292    IN_REGS is nonzero if the argument will be passed in registers.  It will
3293    never be set if REG_PARM_STACK_SPACE is not defined.
3294 
3295    FNDECL is the function in which the argument was defined.
3296 
3297    There are two types of rounding that are done.  The first, controlled by
3298    FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3299    list to be aligned to the specific boundary (in bits).  This rounding
3300    affects the initial and starting offsets, but not the argument size.
3301 
3302    The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3303    optionally rounds the size of the parm to PARM_BOUNDARY.  The
3304    initial offset is not affected by this rounding, while the size always
3305    is and the starting offset may be.  */
3306 
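/* As a hypothetical example of the two roundings, assume PARM_BOUNDARY
   is 32, FUNCTION_ARG_BOUNDARY returns 32, and a 1-byte argument arrives
   at byte offset 5 with args growing upward: the first rounding moves
   the starting offset up to 8, and the second grows the recorded size
   from 1 byte to 4 (with downward padding the byte itself then sits at
   offset 11 inside that 4-byte slot).  */
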
3307 /*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3308     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3309     callers pass in the total size of args so far as
3310     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
3311 
3312 void
3313 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3314 		     int partial, tree fndecl ATTRIBUTE_UNUSED,
3315 		     struct args_size *initial_offset_ptr,
3316 		     struct locate_and_pad_arg_data *locate)
3317 {
3318   tree sizetree;
3319   enum direction where_pad;
3320   unsigned int boundary;
3321   int reg_parm_stack_space = 0;
3322   int part_size_in_regs;
3323 
3324 #ifdef REG_PARM_STACK_SPACE
3325   reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3326 
3327   /* If we have found a stack parm before we reach the end of the
3328      area reserved for registers, skip that area.  */
3329   if (! in_regs)
3330     {
3331       if (reg_parm_stack_space > 0)
3332 	{
3333 	  if (initial_offset_ptr->var)
3334 	    {
3335 	      initial_offset_ptr->var
3336 		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3337 			      ssize_int (reg_parm_stack_space));
3338 	      initial_offset_ptr->constant = 0;
3339 	    }
3340 	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
3341 	    initial_offset_ptr->constant = reg_parm_stack_space;
3342 	}
3343     }
3344 #endif /* REG_PARM_STACK_SPACE */
3345 
3346   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3347 
3348   sizetree
3349     = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3350   where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3351   boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3352   locate->where_pad = where_pad;
3353   locate->boundary = boundary;
3354 
3355   /* Remember if the outgoing parameter requires extra alignment on the
3356      calling function side.  */
3357   if (boundary > PREFERRED_STACK_BOUNDARY)
3358     boundary = PREFERRED_STACK_BOUNDARY;
3359   if (cfun->stack_alignment_needed < boundary)
3360     cfun->stack_alignment_needed = boundary;
3361 
3362 #ifdef ARGS_GROW_DOWNWARD
3363   locate->slot_offset.constant = -initial_offset_ptr->constant;
3364   if (initial_offset_ptr->var)
3365     locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3366 					  initial_offset_ptr->var);
3367 
3368   {
3369     tree s2 = sizetree;
3370     if (where_pad != none
3371 	&& (!host_integerp (sizetree, 1)
3372 	    || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3373       s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3374     SUB_PARM_SIZE (locate->slot_offset, s2);
3375   }
3376 
3377   locate->slot_offset.constant += part_size_in_regs;
3378 
3379   if (!in_regs
3380 #ifdef REG_PARM_STACK_SPACE
3381       || REG_PARM_STACK_SPACE (fndecl) > 0
3382 #endif
3383      )
3384     pad_to_arg_alignment (&locate->slot_offset, boundary,
3385 			  &locate->alignment_pad);
3386 
3387   locate->size.constant = (-initial_offset_ptr->constant
3388 			   - locate->slot_offset.constant);
3389   if (initial_offset_ptr->var)
3390     locate->size.var = size_binop (MINUS_EXPR,
3391 				   size_binop (MINUS_EXPR,
3392 					       ssize_int (0),
3393 					       initial_offset_ptr->var),
3394 				   locate->slot_offset.var);
3395 
3396   /* Pad_below needs the pre-rounded size to know how much to pad
3397      below.  */
3398   locate->offset = locate->slot_offset;
3399   if (where_pad == downward)
3400     pad_below (&locate->offset, passed_mode, sizetree);
3401 
3402 #else /* !ARGS_GROW_DOWNWARD */
3403   if (!in_regs
3404 #ifdef REG_PARM_STACK_SPACE
3405       || REG_PARM_STACK_SPACE (fndecl) > 0
3406 #endif
3407       )
3408     pad_to_arg_alignment (initial_offset_ptr, boundary,
3409 			  &locate->alignment_pad);
3410   locate->slot_offset = *initial_offset_ptr;
3411 
3412 #ifdef PUSH_ROUNDING
3413   if (passed_mode != BLKmode)
3414     sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3415 #endif
3416 
3417   /* Pad_below needs the pre-rounded size to know how much to pad below
3418      so this must be done before rounding up.  */
3419   locate->offset = locate->slot_offset;
3420   if (where_pad == downward)
3421     pad_below (&locate->offset, passed_mode, sizetree);
3422 
3423   if (where_pad != none
3424       && (!host_integerp (sizetree, 1)
3425 	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3426     sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3427 
3428   ADD_PARM_SIZE (locate->size, sizetree);
3429 
3430   locate->size.constant -= part_size_in_regs;
3431 #endif /* ARGS_GROW_DOWNWARD */
3432 }
3433 
3434 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3435    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
3436 
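/* As a concrete illustration, assuming STACK_POINTER_OFFSET is 0: with a
   64-bit BOUNDARY a constant offset of 13 becomes CEIL_ROUND (13, 8) == 16,
   or FLOOR_ROUND (13, 8) == 8 when ARGS_GROW_DOWNWARD is in effect.  */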
3437 static void
3438 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3439 		      struct args_size *alignment_pad)
3440 {
3441   tree save_var = NULL_TREE;
3442   HOST_WIDE_INT save_constant = 0;
3443   int boundary_in_bytes = boundary / BITS_PER_UNIT;
3444   HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3445 
3446 #ifdef SPARC_STACK_BOUNDARY_HACK
3447   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3448      the real alignment of %sp.  However, when it does this, the
3449      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
3450   if (SPARC_STACK_BOUNDARY_HACK)
3451     sp_offset = 0;
3452 #endif
3453 
3454   if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3455     {
3456       save_var = offset_ptr->var;
3457       save_constant = offset_ptr->constant;
3458     }
3459 
3460   alignment_pad->var = NULL_TREE;
3461   alignment_pad->constant = 0;
3462 
3463   if (boundary > BITS_PER_UNIT)
3464     {
3465       if (offset_ptr->var)
3466 	{
3467 	  tree sp_offset_tree = ssize_int (sp_offset);
3468 	  tree offset = size_binop (PLUS_EXPR,
3469 				    ARGS_SIZE_TREE (*offset_ptr),
3470 				    sp_offset_tree);
3471 #ifdef ARGS_GROW_DOWNWARD
3472 	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3473 #else
3474 	  tree rounded = round_up   (offset, boundary / BITS_PER_UNIT);
3475 #endif
3476 
3477 	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3478 	  /* ARGS_SIZE_TREE includes constant term.  */
3479 	  offset_ptr->constant = 0;
3480 	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3481 	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3482 					     save_var);
3483 	}
3484       else
3485 	{
3486 	  offset_ptr->constant = -sp_offset +
3487 #ifdef ARGS_GROW_DOWNWARD
3488 	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3489 #else
3490 	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3491 #endif
3492 	    if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3493 	      alignment_pad->constant = offset_ptr->constant - save_constant;
3494 	}
3495     }
3496 }
3497 
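/* A subroutine of locate_and_pad_parm.  Adjust *OFFSET_PTR to skip the
   padding placed below an argument of mode PASSED_MODE and size SIZETREE,
   i.e. advance it by the difference between the size rounded up to
   PARM_BOUNDARY and the actual size.  */
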
3498 static void
3499 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3500 {
3501   if (passed_mode != BLKmode)
3502     {
3503       if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3504 	offset_ptr->constant
3505 	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3506 	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3507 	      - GET_MODE_SIZE (passed_mode));
3508     }
3509   else
3510     {
3511       if (TREE_CODE (sizetree) != INTEGER_CST
3512 	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3513 	{
3514 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
3515 	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3516 	  /* Add it in.  */
3517 	  ADD_PARM_SIZE (*offset_ptr, s2);
3518 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
3519 	}
3520     }
3521 }
3522 
3523 /* Walk the tree of blocks describing the binding levels within a function
3524    and warn about variables that might be killed by setjmp or vfork.
3525    This is done after calling flow_analysis and before global_alloc
3526    clobbers the pseudo-regs to hard regs.  */
3527 
3528 void
3529 setjmp_vars_warning (tree block)
3530 {
3531   tree decl, sub;
3532 
3533   for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3534     {
3535       if (TREE_CODE (decl) == VAR_DECL
3536 	  && DECL_RTL_SET_P (decl)
3537 	  && REG_P (DECL_RTL (decl))
3538 	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3539 	warning (0, "variable %q+D might be clobbered by %<longjmp%>"
3540 		 " or %<vfork%>",
3541 		 decl);
3542     }
3543 
3544   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3545     setjmp_vars_warning (sub);
3546 }
3547 
3548 /* Do the appropriate part of setjmp_vars_warning
3549    but for arguments instead of local variables.  */
3550 
3551 void
3552 setjmp_args_warning (void)
3553 {
3554   tree decl;
3555   for (decl = DECL_ARGUMENTS (current_function_decl);
3556        decl; decl = TREE_CHAIN (decl))
3557     if (DECL_RTL (decl) != 0
3558 	&& REG_P (DECL_RTL (decl))
3559 	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3560       warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3561 	       decl);
3562 }
3563 
3564 
3565 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3566    and create duplicate blocks.  */
3567 /* ??? Need an option to either create block fragments or to create
3568    abstract origin duplicates of a source block.  It really depends
3569    on what optimization has been performed.  */
3570 
3571 void
3572 reorder_blocks (void)
3573 {
3574   tree block = DECL_INITIAL (current_function_decl);
3575   VEC(tree,heap) *block_stack;
3576 
3577   if (block == NULL_TREE)
3578     return;
3579 
3580   block_stack = VEC_alloc (tree, heap, 10);
3581 
3582   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
3583   clear_block_marks (block);
3584 
3585   /* Prune the old trees away, so that they don't get in the way.  */
3586   BLOCK_SUBBLOCKS (block) = NULL_TREE;
3587   BLOCK_CHAIN (block) = NULL_TREE;
3588 
3589   /* Recreate the block tree from the note nesting.  */
3590   reorder_blocks_1 (get_insns (), block, &block_stack);
3591   BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3592 
3593   VEC_free (tree, heap, block_stack);
3594 }
3595 
3596 /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
3597 
3598 void
3599 clear_block_marks (tree block)
3600 {
3601   while (block)
3602     {
3603       TREE_ASM_WRITTEN (block) = 0;
3604       clear_block_marks (BLOCK_SUBBLOCKS (block));
3605       block = BLOCK_CHAIN (block);
3606     }
3607 }
3608 
3609 static void
3610 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3611 {
3612   rtx insn;
3613 
3614   for (insn = insns; insn; insn = NEXT_INSN (insn))
3615     {
3616       if (NOTE_P (insn))
3617 	{
3618 	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3619 	    {
3620 	      tree block = NOTE_BLOCK (insn);
3621 	      tree origin;
3622 
3623 	      origin = (BLOCK_FRAGMENT_ORIGIN (block)
3624 			? BLOCK_FRAGMENT_ORIGIN (block)
3625 			: block);
3626 
3627 	      /* If we have seen this block before, that means it now
3628 		 spans multiple address regions.  Create a new fragment.  */
3629 	      if (TREE_ASM_WRITTEN (block))
3630 		{
3631 		  tree new_block = copy_node (block);
3632 
3633 		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3634 		  BLOCK_FRAGMENT_CHAIN (new_block)
3635 		    = BLOCK_FRAGMENT_CHAIN (origin);
3636 		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3637 
3638 		  NOTE_BLOCK (insn) = new_block;
3639 		  block = new_block;
3640 		}
3641 
3642 	      BLOCK_SUBBLOCKS (block) = 0;
3643 	      TREE_ASM_WRITTEN (block) = 1;
3644 	      /* When there's only one block for the entire function,
3645 		 current_block == block and we mustn't do this, it
3646 		 will cause infinite recursion.  */
3647 	      if (block != current_block)
3648 		{
3649 		  if (block != origin)
3650 		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3651 
3652 		  BLOCK_SUPERCONTEXT (block) = current_block;
3653 		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3654 		  BLOCK_SUBBLOCKS (current_block) = block;
3655 		  current_block = origin;
3656 		}
3657 	      VEC_safe_push (tree, heap, *p_block_stack, block);
3658 	    }
3659 	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3660 	    {
3661 	      NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3662 	      BLOCK_SUBBLOCKS (current_block)
3663 		= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3664 	      current_block = BLOCK_SUPERCONTEXT (current_block);
3665 	    }
3666 	}
3667     }
3668 }
3669 
3670 /* Reverse the order of elements in the chain T of blocks,
3671    and return the new head of the chain (old last element).  */
3672 
3673 tree
3674 blocks_nreverse (tree t)
3675 {
3676   tree prev = 0, decl, next;
3677   for (decl = t; decl; decl = next)
3678     {
3679       next = BLOCK_CHAIN (decl);
3680       BLOCK_CHAIN (decl) = prev;
3681       prev = decl;
3682     }
3683   return prev;
3684 }
3685 
3686 /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
3687    non-NULL, list them all into VECTOR, in a depth-first preorder
3688    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
3689    blocks.  */
3690 
3691 static int
3692 all_blocks (tree block, tree *vector)
3693 {
3694   int n_blocks = 0;
3695 
3696   while (block)
3697     {
3698       TREE_ASM_WRITTEN (block) = 0;
3699 
3700       /* Record this block.  */
3701       if (vector)
3702 	vector[n_blocks] = block;
3703 
3704       ++n_blocks;
3705 
3706       /* Record the subblocks, and their subblocks...  */
3707       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3708 			      vector ? vector + n_blocks : 0);
3709       block = BLOCK_CHAIN (block);
3710     }
3711 
3712   return n_blocks;
3713 }
3714 
3715 /* Return a vector containing all the blocks rooted at BLOCK.  The
3716    number of elements in the vector is stored in N_BLOCKS_P.  The
3717    vector is dynamically allocated; it is the caller's responsibility
3718    to call `free' on the pointer returned.  */
3719 
3720 static tree *
3721 get_block_vector (tree block, int *n_blocks_p)
3722 {
3723   tree *block_vector;
3724 
3725   *n_blocks_p = all_blocks (block, NULL);
3726   block_vector = XNEWVEC (tree, *n_blocks_p);
3727   all_blocks (block, block_vector);
3728 
3729   return block_vector;
3730 }
3731 
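/* The index to assign to the next numbered BLOCK; see number_blocks.  */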
3732 static GTY(()) int next_block_index = 2;
3733 
3734 /* Set BLOCK_NUMBER for all the blocks in FN.  */
3735 
3736 void
3737 number_blocks (tree fn)
3738 {
3739   int i;
3740   int n_blocks;
3741   tree *block_vector;
3742 
3743   /* For SDB and XCOFF debugging output, we start numbering the blocks
3744      from 1 within each function, rather than keeping a running
3745      count.  */
3746 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3747   if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3748     next_block_index = 1;
3749 #endif
3750 
3751   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3752 
3753   /* The top-level BLOCK isn't numbered at all.  */
3754   for (i = 1; i < n_blocks; ++i)
3755     /* We number the blocks from two.  */
3756     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3757 
3758   free (block_vector);
3759 
3760   return;
3761 }
3762 
3763 /* If VAR is present in a subblock of BLOCK, return the subblock.  */
3764 
3765 tree
3766 debug_find_var_in_block_tree (tree var, tree block)
3767 {
3768   tree t;
3769 
3770   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3771     if (t == var)
3772       return block;
3773 
3774   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3775     {
3776       tree ret = debug_find_var_in_block_tree (var, t);
3777       if (ret)
3778 	return ret;
3779     }
3780 
3781   return NULL_TREE;
3782 }
3783 
3784 /* Allocate a function structure for FNDECL and set its contents
3785    to the defaults.  */
3786 
3787 void
3788 allocate_struct_function (tree fndecl)
3789 {
3790   tree result;
3791   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3792 
3793   cfun = ggc_alloc_cleared (sizeof (struct function));
3794 
3795   cfun->stack_alignment_needed = STACK_BOUNDARY;
3796   cfun->preferred_stack_boundary = STACK_BOUNDARY;
3797 
3798   current_function_funcdef_no = funcdef_no++;
3799 
3800   cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3801 
3802   init_eh_for_function ();
3803 
3804   lang_hooks.function.init (cfun);
3805   if (init_machine_status)
3806     cfun->machine = (*init_machine_status) ();
3807 
3808   if (fndecl == NULL)
3809     return;
3810 
3811   DECL_STRUCT_FUNCTION (fndecl) = cfun;
3812   cfun->decl = fndecl;
3813 
3814   result = DECL_RESULT (fndecl);
3815   if (aggregate_value_p (result, fndecl))
3816     {
3817 #ifdef PCC_STATIC_STRUCT_RETURN
3818       current_function_returns_pcc_struct = 1;
3819 #endif
3820       current_function_returns_struct = 1;
3821     }
3822 
3823   current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3824 
3825   current_function_stdarg
3826     = (fntype
3827        && TYPE_ARG_TYPES (fntype) != 0
3828        && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3829 	   != void_type_node));
3830 
3831   /* Assume all registers in stdarg functions need to be saved.  */
3832   cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3833   cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3834 }
3835 
3836 /* Reset cfun and other non-struct-function variables to defaults as
3837    appropriate for emitting rtl at the start of a function.  */
3838 
3839 static void
3840 prepare_function_start (tree fndecl)
3841 {
3842   if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3843     cfun = DECL_STRUCT_FUNCTION (fndecl);
3844   else
3845     allocate_struct_function (fndecl);
3846   init_emit ();
3847   init_varasm_status (cfun);
3848   init_expr ();
3849 
3850   cse_not_expected = ! optimize;
3851 
3852   /* Caller save not needed yet.  */
3853   caller_save_needed = 0;
3854 
3855   /* We haven't done register allocation yet.  */
3856   reg_renumber = 0;
3857 
3858   /* Indicate that we have not instantiated virtual registers yet.  */
3859   virtuals_instantiated = 0;
3860 
3861   /* Indicate that we want CONCATs now.  */
3862   generating_concat_p = 1;
3863 
3864   /* Indicate we have no need of a frame pointer yet.  */
3865   frame_pointer_needed = 0;
3866 }
3867 
3868 /* Initialize the rtl expansion mechanism so that we can do simple things
3869    like generate sequences.  This is used to provide a context during global
3870    initialization of some passes.  */
3871 void
3872 init_dummy_function_start (void)
3873 {
3874   prepare_function_start (NULL);
3875 }
3876 
3877 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3878    and initialize static variables for generating RTL for the statements
3879    of the function.  */
3880 
3881 void
3882 init_function_start (tree subr)
3883 {
3884   prepare_function_start (subr);
3885 
3886   /* Prevent ever trying to delete the first instruction of a
3887      function.  Also tell final how to output a linenum before the
3888      function prologue.  Note linenums could be missing, e.g. when
3889      compiling a Java .class file.  */
3890   if (! DECL_IS_BUILTIN (subr))
3891     emit_line_note (DECL_SOURCE_LOCATION (subr));
3892 
3893   /* Make sure first insn is a note even if we don't want linenums.
3894      This makes sure the first insn will never be deleted.
3895      Also, final expects a note to appear there.  */
3896   emit_note (NOTE_INSN_DELETED);
3897 
3898   /* Warn if this value is an aggregate type,
3899      regardless of which calling convention we are using for it.  */
3900   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3901     warning (OPT_Waggregate_return, "function returns an aggregate");
3902 }
3903 
3904 /* Make sure all values used by the optimization passes have sane
3905    defaults.  */
3906 unsigned int
3907 init_function_for_compilation (void)
3908 {
3909   reg_renumber = 0;
3910 
3911   /* No prologue/epilogue insns yet.  Make sure that these vectors are
3912      empty.  */
3913   gcc_assert (VEC_length (int, prologue) == 0);
3914   gcc_assert (VEC_length (int, epilogue) == 0);
3915   gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3916   return 0;
3917 }
3918 
3919 struct tree_opt_pass pass_init_function =
3920 {
3921   NULL,                                 /* name */
3922   NULL,                                 /* gate */
3923   init_function_for_compilation,        /* execute */
3924   NULL,                                 /* sub */
3925   NULL,                                 /* next */
3926   0,                                    /* static_pass_number */
3927   0,                                    /* tv_id */
3928   0,                                    /* properties_required */
3929   0,                                    /* properties_provided */
3930   0,                                    /* properties_destroyed */
3931   0,                                    /* todo_flags_start */
3932   0,                                    /* todo_flags_finish */
3933   0                                     /* letter */
3934 };
3935 
3936 
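/* Emit the call to __main at the start of main.  __main is the libgcc
   entry point that historically runs the global constructors; on targets
   that provide an init section this expands to nothing unless
   INVOKE__main is defined.  */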
3937 void
3938 expand_main_function (void)
3939 {
3940 #if (defined(INVOKE__main)				\
3941      || (!defined(HAS_INIT_SECTION)			\
3942 	 && !defined(INIT_SECTION_ASM_OP)		\
3943 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3944   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3945 #endif
3946 }
3947 
3948 /* Expand code to initialize the stack_protect_guard.  This is invoked at
3949    the beginning of a function to be protected.  */
3950 
3951 #ifndef HAVE_stack_protect_set
3952 # define HAVE_stack_protect_set		0
3953 # define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
3954 #endif
3955 
3956 void
3957 stack_protect_prologue (void)
3958 {
3959   tree guard_decl = targetm.stack_protect_guard ();
3960   rtx x, y;
3961 
3962   /* Avoid expand_expr here, because we don't want guard_decl pulled
3963      into registers unless absolutely necessary.  And we know that
3964      cfun->stack_protect_guard is a local stack slot, so this skips
3965      all the fluff.  */
3966   x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3967   y = validize_mem (DECL_RTL (guard_decl));
3968 
3969   /* Allow the target to copy from Y to X without leaking Y into a
3970      register.  */
3971   if (HAVE_stack_protect_set)
3972     {
3973       rtx insn = gen_stack_protect_set (x, y);
3974       if (insn)
3975 	{
3976 	  emit_insn (insn);
3977 	  return;
3978 	}
3979     }
3980 
3981   /* Otherwise do a straight move.  */
3982   emit_move_insn (x, y);
3983 }
3984 
3985 /* Expand code to verify the stack_protect_guard.  This is invoked at
3986    the end of a function to be protected.  */
3987 
3988 #ifndef HAVE_stack_protect_test
3989 # define HAVE_stack_protect_test		0
3990 # define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
3991 #endif
3992 
3993 void
3994 stack_protect_epilogue (void)
3995 {
3996   tree guard_decl = targetm.stack_protect_guard ();
3997   rtx label = gen_label_rtx ();
3998   rtx x, y, tmp;
3999 
4000   /* Avoid expand_expr here, because we don't want guard_decl pulled
4001      into registers unless absolutely necessary.  And we know that
4002      cfun->stack_protect_guard is a local stack slot, so this skips
4003      all the fluff.  */
4004   x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4005   y = validize_mem (DECL_RTL (guard_decl));
4006 
4007   /* Allow the target to compare Y with X without leaking either into
4008      a register.  */
4009   switch (HAVE_stack_protect_test != 0)
4010     {
4011     case 1:
4012       tmp = gen_stack_protect_test (x, y, label);
4013       if (tmp)
4014 	{
4015 	  emit_insn (tmp);
4016 	  break;
4017 	}
4018       /* FALLTHRU */
4019 
4020     default:
4021       emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4022       break;
4023     }
4024 
4025   /* The noreturn predictor has been moved to the tree level.  The rtl-level
4026      predictors estimate this branch about 20%, which isn't enough to get
4027      things moved out of line.  Since this is the only extant case of adding
4028      a noreturn function at the rtl level, it doesn't seem worth doing ought
4029      except adding the prediction by hand.  */
4030   tmp = get_last_insn ();
4031   if (JUMP_P (tmp))
4032     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4033 
4034   expand_expr_stmt (targetm.stack_protect_fail ());
4035   emit_label (label);
4036 }
4037 
4038 /* Start the RTL for a new function, and set variables used for
4039    emitting RTL.
4040    SUBR is the FUNCTION_DECL node.  */
4043 
4044 void
4045 expand_function_start (tree subr)
4046 {
4047   /* Make sure volatile mem refs aren't considered
4048      valid operands of arithmetic insns.  */
4049   init_recog_no_volatile ();
4050 
4051   current_function_profile
4052     = (profile_flag
4053        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4054 
4055   current_function_limit_stack
4056     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4057 
4058   /* Make the label for return statements to jump to.  Do not special
4059      case machines with special return instructions -- they will be
4060      handled later during jump, ifcvt, or epilogue creation.  */
4061   return_label = gen_label_rtx ();
4062 
4063   /* Initialize rtx used to return the value.  */
4064   /* Do this before assign_parms so that we copy the struct value address
4065      before any library calls that assign parms might generate.  */
4066 
4067   /* Decide whether to return the value in memory or in a register.  */
4068   if (aggregate_value_p (DECL_RESULT (subr), subr))
4069     {
4070       /* Returning something that won't go in a register.  */
4071       rtx value_address = 0;
4072 
4073 #ifdef PCC_STATIC_STRUCT_RETURN
4074       if (current_function_returns_pcc_struct)
4075 	{
4076 	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4077 	  value_address = assemble_static_space (size);
4078 	}
4079       else
4080 #endif
4081 	{
4082 	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4083 	  /* Expect to be passed the address of a place to store the value.
4084 	     If it is passed as an argument, assign_parms will take care of
4085 	     it.  */
4086 	  if (sv)
4087 	    {
4088 	      value_address = gen_reg_rtx (Pmode);
4089 	      emit_move_insn (value_address, sv);
4090 	    }
4091 	}
4092       if (value_address)
4093 	{
4094 	  rtx x = value_address;
4095 	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4096 	    {
4097 	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4098 	      set_mem_attributes (x, DECL_RESULT (subr), 1);
4099 	    }
4100 	  SET_DECL_RTL (DECL_RESULT (subr), x);
4101 	}
4102     }
4103   else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4104     /* If return mode is void, this decl rtl should not be used.  */
4105     SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4106   else
4107     {
4108       /* Compute the return values into a pseudo reg, which we will copy
4109 	 into the true return register after the cleanups are done.  */
4110       tree return_type = TREE_TYPE (DECL_RESULT (subr));
4111       if (TYPE_MODE (return_type) != BLKmode
4112 	  && targetm.calls.return_in_msb (return_type))
4113 	/* expand_function_end will insert the appropriate padding in
4114 	   this case.  Use the return value's natural (unpadded) mode
4115 	   within the function proper.  */
4116 	SET_DECL_RTL (DECL_RESULT (subr),
4117 		      gen_reg_rtx (TYPE_MODE (return_type)));
4118       else
4119 	{
4120 	  /* In order to figure out what mode to use for the pseudo, we
4121 	     figure out what the mode of the eventual return register will
4122 	     actually be, and use that.  */
4123 	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4124 
4125 	  /* Structures that are returned in registers are not
4126 	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
4127 	  if (REG_P (hard_reg))
4128 	    SET_DECL_RTL (DECL_RESULT (subr),
4129 			  gen_reg_rtx (GET_MODE (hard_reg)));
4130 	  else
4131 	    {
4132 	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4133 	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4134 	    }
4135 	}
4136 
4137       /* Set DECL_REGISTER flag so that expand_function_end will copy the
4138 	 result to the real return register(s).  */
4139       DECL_REGISTER (DECL_RESULT (subr)) = 1;
4140     }
4141 
4142   /* Initialize rtx for parameters and local variables.
4143      In some cases this requires emitting insns.  */
4144   assign_parms (subr);
4145 
4146   /* If function gets a static chain arg, store it.  */
4147   if (cfun->static_chain_decl)
4148     {
4149       tree parm = cfun->static_chain_decl;
4150       rtx local = gen_reg_rtx (Pmode);
4151 
4152       set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4153       SET_DECL_RTL (parm, local);
4154       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4155 
4156       emit_move_insn (local, static_chain_incoming_rtx);
4157     }
4158 
4159   /* If the function receives a non-local goto, then store the
4160      bits we need to restore the frame pointer.  */
4161   if (cfun->nonlocal_goto_save_area)
4162     {
4163       tree t_save;
4164       rtx r_save;
4165 
4166       /* ??? We need to do this save early.  Unfortunately here is
4167 	 before the frame variable gets declared.  Help out...  */
4168       expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4169 
4170       t_save = build4 (ARRAY_REF, ptr_type_node,
4171 		       cfun->nonlocal_goto_save_area,
4172 		       integer_zero_node, NULL_TREE, NULL_TREE);
4173       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4174       r_save = convert_memory_address (Pmode, r_save);
4175 
4176       emit_move_insn (r_save, virtual_stack_vars_rtx);
4177       update_nonlocal_goto_save_area ();
4178     }
4179 
4180   /* The following was moved from init_function_start.
4181      The move is supposed to make sdb output more accurate.  */
4182   /* Indicate the beginning of the function body,
4183      as opposed to parm setup.  */
4184   emit_note (NOTE_INSN_FUNCTION_BEG);
4185 
4186   gcc_assert (NOTE_P (get_last_insn ()));
4187 
4188   parm_birth_insn = get_last_insn ();
4189 
4190   if (current_function_profile)
4191     {
4192 #ifdef PROFILE_HOOK
4193       PROFILE_HOOK (current_function_funcdef_no);
4194 #endif
4195     }
4196 
4197   /* After the display initializations is where the stack checking
4198      probe should go.  */
4199   if (flag_stack_check)
4200     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4201 
4202   /* Make sure there is a line number after the function entry setup code.  */
4203   force_next_line_note ();
4204 }
4205 
4206 /* Undo the effects of init_dummy_function_start.  */
4207 void
4208 expand_dummy_function_end (void)
4209 {
4210   /* End any sequences that failed to be closed due to syntax errors.  */
4211   while (in_sequence_p ())
4212     end_sequence ();
4213 
4214   /* Outside function body, can't compute type's actual size
4215      until next function's body starts.  */
4216 
4217   free_after_parsing (cfun);
4218   free_after_compilation (cfun);
4219   cfun = 0;
4220 }
4221 
4222 /* Call DOIT for each hard register used as a return value from
4223    the current function.  */
4224 
4225 void
4226 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4227 {
4228   rtx outgoing = current_function_return_rtx;
4229 
4230   if (! outgoing)
4231     return;
4232 
4233   if (REG_P (outgoing))
4234     (*doit) (outgoing, arg);
4235   else if (GET_CODE (outgoing) == PARALLEL)
4236     {
4237       int i;
4238 
4239       for (i = 0; i < XVECLEN (outgoing, 0); i++)
4240 	{
4241 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4242 
4243 	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4244 	    (*doit) (x, arg);
4245 	}
4246     }
4247 }
4248 
4249 static void
4250 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4251 {
4252   emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4253 }
4254 
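/* Emit a CLOBBER for each register used to return the current function's
   value, and for the pseudo holding that value if one was used.  */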
4255 void
4256 clobber_return_register (void)
4257 {
4258   diddle_return_value (do_clobber_return_reg, NULL);
4259 
4260   /* In case we do use pseudo to return value, clobber it too.  */
4261   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4262     {
4263       tree decl_result = DECL_RESULT (current_function_decl);
4264       rtx decl_rtl = DECL_RTL (decl_result);
4265       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4266 	{
4267 	  do_clobber_return_reg (decl_rtl, NULL);
4268 	}
4269     }
4270 }
4271 
4272 static void
4273 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4274 {
4275   emit_insn (gen_rtx_USE (VOIDmode, reg));
4276 }
4277 
4278 static void
4279 use_return_register (void)
4280 {
4281   diddle_return_value (do_use_return_reg, NULL);
4282 }
4283 
4284 /* Possibly warn about unused parameters.  */
4285 void
4286 do_warn_unused_parameter (tree fn)
4287 {
4288   tree decl;
4289 
4290   for (decl = DECL_ARGUMENTS (fn);
4291        decl; decl = TREE_CHAIN (decl))
4292     if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4293 	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4294       warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4295 }
4296 
4297 static GTY(()) rtx initial_trampoline;
4298 
4299 /* Generate RTL for the end of the current function.  */
4300 
4301 void
4302 expand_function_end (void)
4303 {
4304   rtx clobber_after;
4305 
4306   /* If arg_pointer_save_area was referenced only from a nested
4307      function, we will not have initialized it yet.  Do that now.  */
4308   if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4309     get_arg_pointer_save_area (cfun);
4310 
4311   /* If we are doing stack checking and this function makes calls,
4312      do a stack probe at the start of the function to ensure we have enough
4313      space for another stack frame.  */
4314   if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4315     {
4316       rtx insn, seq;
4317 
4318       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4319 	if (CALL_P (insn))
4320 	  {
4321 	    start_sequence ();
4322 	    probe_stack_range (STACK_CHECK_PROTECT,
4323 			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4324 	    seq = get_insns ();
4325 	    end_sequence ();
4326 	    emit_insn_before (seq, stack_check_probe_note);
4327 	    break;
4328 	  }
4329     }
4330 
4331   /* Possibly warn about unused parameters.
4332      When the frontend does unit-at-a-time, the warning is already
4333      issued at finalization time.  */
4334   if (warn_unused_parameter
4335       && !lang_hooks.callgraph.expand_function)
4336     do_warn_unused_parameter (current_function_decl);
4337 
4338   /* End any sequences that failed to be closed due to syntax errors.  */
4339   while (in_sequence_p ())
4340     end_sequence ();
4341 
4342   clear_pending_stack_adjust ();
4343   do_pending_stack_adjust ();
4344 
4345   /* Mark the end of the function body.
4346      If control reaches this insn, the function can drop through
4347      without returning a value.  */
4348   emit_note (NOTE_INSN_FUNCTION_END);
4349 
4350   /* Must mark the last line number note in the function, so that the test
4351      coverage code can avoid counting the last line twice.  This just tells
4352      the code to ignore the immediately following line note, since there
4353      already exists a copy of this note somewhere above.  This line number
4354      note is still needed for debugging though, so we can't delete it.  */
4355   if (flag_test_coverage)
4356     emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4357 
4358   /* Output a linenumber for the end of the function.
4359      SDB depends on this.  */
4360   force_next_line_note ();
4361   emit_line_note (input_location);
4362 
4363   /* Before the return label (if any), clobber the return
4364      registers so that they are not propagated live to the rest of
4365      the function.  This can only happen with functions that drop
4366      through; if there had been a return statement, there would
4367      have either been a return rtx, or a jump to the return label.
4368 
4369      We delay actual code generation until after current_function_return_rtx
4370      is computed.  */
4371   clobber_after = get_last_insn ();
4372 
4373   /* Output the label for the actual return from the function.  */
4374   emit_label (return_label);
4375 
4376   if (USING_SJLJ_EXCEPTIONS)
4377     {
4378       /* Let except.c know where it should emit the call to unregister
4379 	 the function context for sjlj exceptions.  */
4380       if (flag_exceptions)
4381 	sjlj_emit_function_exit_after (get_last_insn ());
4382     }
4383   else
4384     {
4385       /* @@@ This is a kludge.  We want to ensure that instructions that
4386 	 may trap are not moved into the epilogue by scheduling, because
4387 	 we don't always emit unwind information for the epilogue.
4388 	 However, not all machine descriptions define a blockage insn, so
4389 	 emit an ASM_INPUT to act as one.  */
4390       if (flag_non_call_exceptions)
4391 	emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4392     }
4393 
4394   /* If this is an implementation of throw, do what's necessary to
4395      communicate between __builtin_eh_return and the epilogue.  */
4396   expand_eh_return ();
4397 
4398   /* If scalar return value was computed in a pseudo-reg, or was a named
4399      return value that got dumped to the stack, copy that to the hard
4400      return register.  */
4401   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4402     {
4403       tree decl_result = DECL_RESULT (current_function_decl);
4404       rtx decl_rtl = DECL_RTL (decl_result);
4405 
4406       if (REG_P (decl_rtl)
4407 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4408 	  : DECL_REGISTER (decl_result))
4409 	{
4410 	  rtx real_decl_rtl = current_function_return_rtx;
4411 
4412 	  /* This should be set in assign_parms.  */
4413 	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4414 
4415 	  /* If this is a BLKmode structure being returned in registers,
4416 	     then use the mode computed in expand_return.  Note that if
4417 	     decl_rtl is memory, then its mode may have been changed,
4418 	     but that current_function_return_rtx has not.  */
4419 	  if (GET_MODE (real_decl_rtl) == BLKmode)
4420 	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4421 
4422 	  /* If a non-BLKmode return value should be padded at the least
4423 	     significant end of the register, shift it left by the appropriate
4424 	     amount.  BLKmode results are handled using the group load/store
4425 	     machinery.  */
4426 	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4427 	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4428 	    {
4429 	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4430 					   REGNO (real_decl_rtl)),
4431 			      decl_rtl);
4432 	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4433 	    }
4434 	  /* If a named return value dumped decl_return to memory, then
4435 	     we may need to re-do the PROMOTE_MODE signed/unsigned
4436 	     extension.  */
4437 	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4438 	    {
4439 	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4440 
4441 	      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4442 		promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4443 			      &unsignedp, 1);
4444 
4445 	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
4446 	    }
4447 	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
4448 	    {
4449 	      /* If expand_function_start has created a PARALLEL for decl_rtl,
4450 		 move the result to the real return registers.  Otherwise, do
4451 		 a group load from decl_rtl for a named return.  */
4452 	      if (GET_CODE (decl_rtl) == PARALLEL)
4453 		emit_group_move (real_decl_rtl, decl_rtl);
4454 	      else
4455 		emit_group_load (real_decl_rtl, decl_rtl,
4456 				 TREE_TYPE (decl_result),
4457 				 int_size_in_bytes (TREE_TYPE (decl_result)));
4458 	    }
4459 	  /* In the case of complex integer modes smaller than a word, we'll
4460 	     need to generate some non-trivial bitfield insertions.  Do that
4461 	     on a pseudo and not the hard register.  */
4462 	  else if (GET_CODE (decl_rtl) == CONCAT
4463 		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4464 		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4465 	    {
4466 	      int old_generating_concat_p;
4467 	      rtx tmp;
4468 
4469 	      old_generating_concat_p = generating_concat_p;
4470 	      generating_concat_p = 0;
4471 	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4472 	      generating_concat_p = old_generating_concat_p;
4473 
4474 	      emit_move_insn (tmp, decl_rtl);
4475 	      emit_move_insn (real_decl_rtl, tmp);
4476 	    }
4477 	  else
4478 	    emit_move_insn (real_decl_rtl, decl_rtl);
4479 	}
4480     }
4481 
4482   /* If returning a structure, arrange to return the address of the value
4483      in a place where debuggers expect to find it.
4484 
4485      If returning a structure PCC style,
4486      the caller also depends on this value.
4487      And current_function_returns_pcc_struct is not necessarily set.  */
4488   if (current_function_returns_struct
4489       || current_function_returns_pcc_struct)
4490     {
4491       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4492       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4493       rtx outgoing;
4494 
4495       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4496 	type = TREE_TYPE (type);
4497       else
4498 	value_address = XEXP (value_address, 0);
4499 
4500       outgoing = targetm.calls.function_value (build_pointer_type (type),
4501 					       current_function_decl, true);
4502 
4503       /* Mark this as a function return value so integrate will delete the
4504 	 assignment and USE below when inlining this function.  */
4505       REG_FUNCTION_VALUE_P (outgoing) = 1;
4506 
4507       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
4508       value_address = convert_memory_address (GET_MODE (outgoing),
4509 					      value_address);
4510 
4511       emit_move_insn (outgoing, value_address);
4512 
4513       /* Show return register used to hold result (in this case the address
4514 	 of the result).  */
4515       current_function_return_rtx = outgoing;
4516     }
4517 
4518   /* Emit the actual code to clobber return register.  */
4519   {
4520     rtx seq;
4521 
4522     start_sequence ();
4523     clobber_return_register ();
4524     expand_naked_return ();
4525     seq = get_insns ();
4526     end_sequence ();
4527 
4528     emit_insn_after (seq, clobber_after);
4529   }
4530 
4531   /* Output the label for the naked return from the function.  */
4532   emit_label (naked_return_label);
4533 
4534   /* If stack protection is enabled for this function, check the guard.  */
4535   if (cfun->stack_protect_guard)
4536     stack_protect_epilogue ();
4537 
4538   /* If we had calls to alloca, and this machine needs
4539      an accurate stack pointer to exit the function,
4540      insert some code to save and restore the stack pointer.  */
4541   if (! EXIT_IGNORE_STACK
4542       && current_function_calls_alloca)
4543     {
4544       rtx tem = 0;
4545 
4546       emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4547       emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4548     }
4549 
4550   /* ??? This should no longer be necessary since stupid is no longer with
4551      us, but there are some parts of the compiler (eg reload_combine, and
4552      sh mach_dep_reorg) that still try and compute their own lifetime info
4553      instead of using the general framework.  */
4554   use_return_register ();
4555 }
4556 
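/* Return the arg pointer save area of function F, allocating a stack slot
   for it on first use.  For the current function, also emit the insns
   that store the incoming arg pointer into that slot at the very start
   of the function.  */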
4557 rtx
4558 get_arg_pointer_save_area (struct function *f)
4559 {
4560   rtx ret = f->x_arg_pointer_save_area;
4561 
4562   if (! ret)
4563     {
4564       ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4565       f->x_arg_pointer_save_area = ret;
4566     }
4567 
4568   if (f == cfun && ! f->arg_pointer_save_area_init)
4569     {
4570       rtx seq;
4571 
4572       /* Save the arg pointer at the beginning of the function.  The
4573 	 generated stack slot may not be a valid memory address, so we
4574 	 have to check it and fix it if necessary.  */
4575       start_sequence ();
4576       emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4577       seq = get_insns ();
4578       end_sequence ();
4579 
4580       push_topmost_sequence ();
4581       emit_insn_after (seq, entry_of_function ());
4582       pop_topmost_sequence ();
4583     }
4584 
4585   return ret;
4586 }
4587 
4588 /* Extend a vector that records the INSN_UIDs of INSNS
4589    (a list of one or more insns).  */
4590 
4591 static void
4592 record_insns (rtx insns, VEC(int,heap) **vecp)
4593 {
4594   rtx tmp;
4595 
4596   for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4597     VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4598 }
4599 
4600 /* Set the locator of the insn chain starting at INSN to LOC.  */
4601 static void
4602 set_insn_locators (rtx insn, int loc)
4603 {
4604   while (insn != NULL_RTX)
4605     {
4606       if (INSN_P (insn))
4607 	INSN_LOCATOR (insn) = loc;
4608       insn = NEXT_INSN (insn);
4609     }
4610 }
4611 
4612 /* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
4613    be running after reorg, SEQUENCE rtl is possible.  */
4614 
4615 static int
4616 contains (rtx insn, VEC(int,heap) **vec)
4617 {
4618   int i, j;
4619 
4620   if (NONJUMP_INSN_P (insn)
4621       && GET_CODE (PATTERN (insn)) == SEQUENCE)
4622     {
4623       int count = 0;
4624       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4625 	for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4626 	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4627 	      == VEC_index (int, *vec, j))
4628 	    count++;
4629       return count;
4630     }
4631   else
4632     {
4633       for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4634 	if (INSN_UID (insn) == VEC_index (int, *vec, j))
4635 	  return 1;
4636     }
4637   return 0;
4638 }
4639 
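/* Return nonzero if INSN (or any insn inside a SEQUENCE it wraps) was
   recorded as part of the prologue or epilogue.  */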
4640 int
4641 prologue_epilogue_contains (rtx insn)
4642 {
4643   if (contains (insn, &prologue))
4644     return 1;
4645   if (contains (insn, &epilogue))
4646     return 1;
4647   return 0;
4648 }
4649 
4650 int
4651 sibcall_epilogue_contains (rtx insn)
4652 {
4653   if (sibcall_epilogue)
4654     return contains (insn, &sibcall_epilogue);
4655   return 0;
4656 }
4657 
4658 #ifdef HAVE_return
4659 /* Insert gen_return at the end of block BB.  This also means updating
4660    block_for_insn appropriately.  */
4661 
4662 static void
4663 emit_return_into_block (basic_block bb, rtx line_note)
4664 {
4665   emit_jump_insn_after (gen_return (), BB_END (bb));
4666   if (line_note)
4667     emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4668 }
4669 #endif /* HAVE_return */
4670 
4671 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4672 
4673 /* These functions convert the epilogue into a variant that does not
4674    modify the stack pointer.  This is used in cases where a function
4675    returns an object whose size is not known until it is computed.
4676    The called function leaves the object on the stack, leaves the
4677    stack depressed, and returns a pointer to the object.
4678 
4679    What we need to do is track all modifications and references to the
4680    stack pointer, deleting the modifications and changing the
4681    references to point to the location the stack pointer would have
4682    pointed to had the modifications taken place.
4683 
4684    These functions need to be portable so we need to make as few
4685    assumptions about the epilogue as we can.  However, the epilogue
4686    basically contains three things: instructions to reset the stack
4687    pointer, instructions to reload registers, possibly including the
4688    frame pointer, and an instruction to return to the caller.
4689 
4690    We must be sure of what a relevant epilogue insn is doing.  We also
4691    make no attempt to validate the insns we make since if they are
4692    invalid, we probably can't do anything valid.  The intent is that
4693    these routines get "smarter" as more and more machines start to use
4694    them and they try operating on different epilogues.
4695 
4696    We use the following structure to track what the part of the
4697    epilogue that we've already processed has done.  We keep two copies
4698    of the SP equivalence, one for use during the insn we are
4699    processing and one for use in the next insn.  The difference is
4700    because one part of a PARALLEL may adjust SP and the other may use
4701    it.  */
4702 
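/* For instance (a schematic example, not drawn from any particular
   target), a single epilogue insn such as

     (parallel [(set (reg sp) (plus (reg sp) (const_int 16)))
                (set (reg r3) (mem (plus (reg sp) (const_int 8))))])

   adjusts SP in one arm while still using the old SP value in the other,
   which is why the new equivalence is recorded separately from the one
   used while processing the rest of the insn.  */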
4703 struct epi_info
4704 {
4705   rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
4706   HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
4707   rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
4708   HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
4709   rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
4710 				   should be set to once we no longer need
4711 				   its value.  */
4712   rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4713 					     for registers.  */
4714 };
4715 
4716 static void handle_epilogue_set (rtx, struct epi_info *);
4717 static void update_epilogue_consts (rtx, rtx, void *);
4718 static void emit_equiv_load (struct epi_info *);
4719 
4720 /* Modify INSNS, a list of one or more insns that is part of the epilogue,
4721    so that it no longer modifies the stack pointer.  Return the new list of insns.  */
4722 
4723 static rtx
4724 keep_stack_depressed (rtx insns)
4725 {
4726   int j;
4727   struct epi_info info;
4728   rtx insn, next;
4729 
4730   /* If the epilogue is just a single instruction, it must be OK as is.  */
4731   if (NEXT_INSN (insns) == NULL_RTX)
4732     return insns;
4733 
4734   /* Otherwise, start a sequence, initialize the information we have, and
4735      process all the insns we were given.  */
4736   start_sequence ();
4737 
4738   info.sp_equiv_reg = stack_pointer_rtx;
4739   info.sp_offset = 0;
4740   info.equiv_reg_src = 0;
4741 
4742   for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4743     info.const_equiv[j] = 0;
4744 
4745   insn = insns;
4746   next = NULL_RTX;
4747   while (insn != NULL_RTX)
4748     {
4749       next = NEXT_INSN (insn);
4750 
4751       if (!INSN_P (insn))
4752 	{
4753 	  add_insn (insn);
4754 	  insn = next;
4755 	  continue;
4756 	}
4757 
4758       /* If this insn references the register that SP is equivalent to and
4759 	 we have a pending load to that register, we must force out the load
4760 	 first and then indicate we no longer know what SP's equivalent is.  */
4761       if (info.equiv_reg_src != 0
4762 	  && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4763 	{
4764 	  emit_equiv_load (&info);
4765 	  info.sp_equiv_reg = 0;
4766 	}
4767 
4768       info.new_sp_equiv_reg = info.sp_equiv_reg;
4769       info.new_sp_offset = info.sp_offset;
4770 
4771       /* If this is a (RETURN) and the return address is on the stack,
4772 	 update the address and change to an indirect jump.  */
4773       if (GET_CODE (PATTERN (insn)) == RETURN
4774 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
4775 	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4776 	{
4777 	  rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4778 	  rtx base = 0;
4779 	  HOST_WIDE_INT offset = 0;
4780 	  rtx jump_insn, jump_set;
4781 
4782 	  /* If the return address is in a register, we can emit the insn
4783 	     unchanged.  Otherwise, it must be a MEM and we see what the
4784 	     base register and offset are.  In any case, we have to emit any
4785 	     pending load to the equivalent reg of SP, if any.  */
4786 	  if (REG_P (retaddr))
4787 	    {
4788 	      emit_equiv_load (&info);
4789 	      add_insn (insn);
4790 	      insn = next;
4791 	      continue;
4792 	    }
4793 	  else
4794 	    {
4795 	      rtx ret_ptr;
4796 	      gcc_assert (MEM_P (retaddr));
4797 
4798 	      ret_ptr = XEXP (retaddr, 0);
4799 
4800 	      if (REG_P (ret_ptr))
4801 		{
4802 		  base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4803 		  offset = 0;
4804 		}
4805 	      else
4806 		{
4807 		  gcc_assert (GET_CODE (ret_ptr) == PLUS
4808 			      && REG_P (XEXP (ret_ptr, 0))
4809 			      && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4810 		  base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4811 		  offset = INTVAL (XEXP (ret_ptr, 1));
4812 		}
4813 	    }
4814 
4815 	  /* If the base of the location containing the return pointer
4816 	     is SP, we must update it with the replacement address.  Otherwise,
4817 	     just build the necessary MEM.  */
4818 	  retaddr = plus_constant (base, offset);
4819 	  if (base == stack_pointer_rtx)
4820 	    retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4821 					    plus_constant (info.sp_equiv_reg,
4822 							   info.sp_offset));
4823 
4824 	  retaddr = gen_rtx_MEM (Pmode, retaddr);
4825 	  MEM_NOTRAP_P (retaddr) = 1;
4826 
4827 	  /* If there is a pending load to the equivalent register for SP
4828 	     and we reference that register, we must load our address into
4829 	     a scratch register and then do that load.  */
4830 	  if (info.equiv_reg_src
4831 	      && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4832 	    {
4833 	      unsigned int regno;
4834 	      rtx reg;
4835 
4836 	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4837 		if (HARD_REGNO_MODE_OK (regno, Pmode)
4838 		    && !fixed_regs[regno]
4839 		    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4840 		    && !REGNO_REG_SET_P
4841 		         (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4842 		    && !refers_to_regno_p (regno,
4843 					   regno + hard_regno_nregs[regno]
4844 								   [Pmode],
4845 					   info.equiv_reg_src, NULL)
4846 		    && info.const_equiv[regno] == 0)
4847 		  break;
4848 
4849 	      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4850 
4851 	      reg = gen_rtx_REG (Pmode, regno);
4852 	      emit_move_insn (reg, retaddr);
4853 	      retaddr = reg;
4854 	    }
4855 
4856 	  emit_equiv_load (&info);
4857 	  jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4858 
4859 	  /* Show the SET in the above insn is a RETURN.  */
4860 	  jump_set = single_set (jump_insn);
4861 	  gcc_assert (jump_set);
4862 	  SET_IS_RETURN_P (jump_set) = 1;
4863 	}
4864 
4865       /* If SP is not mentioned in the pattern and its equivalent register, if
4866 	 any, is not modified, just emit it.  Otherwise, if neither is set,
4867 	 replace the reference to SP and emit the insn.  If none of those are
4868 	 true, handle each SET individually.  */
4869       else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4870 	       && (info.sp_equiv_reg == stack_pointer_rtx
4871 		   || !reg_set_p (info.sp_equiv_reg, insn)))
4872 	add_insn (insn);
4873       else if (! reg_set_p (stack_pointer_rtx, insn)
4874 	       && (info.sp_equiv_reg == stack_pointer_rtx
4875 		   || !reg_set_p (info.sp_equiv_reg, insn)))
4876 	{
4877 	  int changed;
4878 
4879 	  changed = validate_replace_rtx (stack_pointer_rtx,
4880 					  plus_constant (info.sp_equiv_reg,
4881 							 info.sp_offset),
4882 					  insn);
4883 	  gcc_assert (changed);
4884 
4885 	  add_insn (insn);
4886 	}
4887       else if (GET_CODE (PATTERN (insn)) == SET)
4888 	handle_epilogue_set (PATTERN (insn), &info);
4889       else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4890 	{
4891 	  for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4892 	    if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4893 	      handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4894 	}
4895       else
4896 	add_insn (insn);
4897 
4898       info.sp_equiv_reg = info.new_sp_equiv_reg;
4899       info.sp_offset = info.new_sp_offset;
4900 
4901       /* Now update any constants this insn sets.  */
4902       note_stores (PATTERN (insn), update_epilogue_consts, &info);
4903       insn = next;
4904     }
4905 
4906   insns = get_insns ();
4907   end_sequence ();
4908   return insns;
4909 }
4910 
4911 /* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
4912    structure that contains information about what we've seen so far.  We
4913    process this SET by either updating that data or by emitting one or
4914    more insns.  */
4915 
4916 static void
4917 handle_epilogue_set (rtx set, struct epi_info *p)
4918 {
4919   /* First handle the case where we are setting SP.  Record what it is being
4920      set from, which we must be able to determine.  */
4921   if (reg_set_p (stack_pointer_rtx, set))
4922     {
4923       gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4924 
4925       if (GET_CODE (SET_SRC (set)) == PLUS)
4926 	{
4927 	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4928 	  if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4929 	    p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4930 	  else
4931 	    {
4932 	      gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4933 			  && (REGNO (XEXP (SET_SRC (set), 1))
4934 			      < FIRST_PSEUDO_REGISTER)
4935 			  && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4936 	      p->new_sp_offset
4937 		= INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4938 	    }
4939 	}
4940       else
4941 	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4942 
4943       /* If we are adjusting SP, we adjust from the old data.  */
4944       if (p->new_sp_equiv_reg == stack_pointer_rtx)
4945 	{
4946 	  p->new_sp_equiv_reg = p->sp_equiv_reg;
4947 	  p->new_sp_offset += p->sp_offset;
4948 	}
4949 
4950       gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4951 
4952       return;
4953     }
4954 
4955   /* Next handle the case where we are setting SP's equivalent
4956      register.  We must not already have a value to set it to.  We
4957      could update, but there seems little point in handling that case.
4958      Note that we have to allow for the case where we are setting the
4959      register set in the previous part of a PARALLEL inside a single
4960      insn.  But use the old offset for any updates within this insn.
4961      We must allow for the case where the register is being set in a
4962 	 different (usually wider) mode than Pmode.  */
4963   else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4964     {
4965       gcc_assert (!p->equiv_reg_src
4966 		  && REG_P (p->new_sp_equiv_reg)
4967 		  && REG_P (SET_DEST (set))
4968 		  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4969 		      <= BITS_PER_WORD)
4970 		  && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4971       p->equiv_reg_src
4972 	= simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4973 				plus_constant (p->sp_equiv_reg,
4974 					       p->sp_offset));
4975     }
4976 
4977   /* Otherwise, replace any references to SP in the insn to its new value
4978      and emit the insn.  */
4979   else
4980     {
4981       SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4982 					    plus_constant (p->sp_equiv_reg,
4983 							   p->sp_offset));
4984       SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4985 					     plus_constant (p->sp_equiv_reg,
4986 							    p->sp_offset));
4987       emit_insn (set);
4988     }
4989 }
4990 
4991 /* Update the tracking information for registers set to constants.  */
4992 
4993 static void
4994 update_epilogue_consts (rtx dest, rtx x, void *data)
4995 {
4996   struct epi_info *p = (struct epi_info *) data;
4997   rtx new;
4998 
4999   if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5000     return;
5001 
5002   /* If we are either clobbering a register or doing a partial set,
5003      show we don't know the value.  */
5004   else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5005     p->const_equiv[REGNO (dest)] = 0;
5006 
5007   /* If we are setting it to a constant, record that constant.  */
5008   else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5009     p->const_equiv[REGNO (dest)] = SET_SRC (x);
5010 
5011   /* If this is a binary operation between a register we have been tracking
5012      and a constant, see if we can compute a new constant value.  */
5013   else if (ARITHMETIC_P (SET_SRC (x))
5014 	   && REG_P (XEXP (SET_SRC (x), 0))
5015 	   && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5016 	   && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5017 	   && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5018 	   && 0 != (new = simplify_binary_operation
5019 		    (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5020 		     p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5021 		     XEXP (SET_SRC (x), 1)))
5022 	   && GET_CODE (new) == CONST_INT)
5023     p->const_equiv[REGNO (dest)] = new;
5024 
5025   /* Otherwise, we can't do anything with this value.  */
5026   else
5027     p->const_equiv[REGNO (dest)] = 0;
5028 }
5029 
5030 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */
5031 
5032 static void
5033 emit_equiv_load (struct epi_info *p)
5034 {
5035   if (p->equiv_reg_src != 0)
5036     {
5037       rtx dest = p->sp_equiv_reg;
5038 
5039       if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5040 	dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5041 			    REGNO (p->sp_equiv_reg));
5042 
5043       emit_move_insn (dest, p->equiv_reg_src);
5044       p->equiv_reg_src = 0;
5045     }
5046 }
5047 #endif
5048 
5049 /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
5050    this into place with notes indicating where the prologue ends and where
5051    the epilogue begins.  Update the basic block information when possible.  */
5052 
5053 void
5054 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5055 {
5056   int inserted = 0;
5057   edge e;
5058 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5059   rtx seq;
5060 #endif
5061 #ifdef HAVE_prologue
5062   rtx prologue_end = NULL_RTX;
5063 #endif
5064 #if defined (HAVE_epilogue) || defined(HAVE_return)
5065   rtx epilogue_end = NULL_RTX;
5066 #endif
5067   edge_iterator ei;
5068 
5069 #ifdef HAVE_prologue
5070   if (HAVE_prologue)
5071     {
5072       start_sequence ();
5073       seq = gen_prologue ();
5074       emit_insn (seq);
5075 
5076       /* Retain a map of the prologue insns.  */
5077       record_insns (seq, &prologue);
5078       prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5079 
5080 #ifndef PROFILE_BEFORE_PROLOGUE
5081       /* Ensure that instructions are not moved into the prologue when
5082 	 profiling is on.  The call to the profiling routine can be
5083 	 emitted within the live range of a call-clobbered register.  */
5084       if (current_function_profile)
5085 	emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
5086 #endif
5087 
5088       seq = get_insns ();
5089       end_sequence ();
5090       set_insn_locators (seq, prologue_locator);
5091 
5092       /* Can't deal with multiple successors of the entry block
5093          at the moment.  Function should always have at least one
5094          entry point.  */
5095       gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5096 
5097       insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5098       inserted = 1;
5099     }
5100 #endif
5101 
5102   /* If the exit block has no non-fake predecessors, we don't need
5103      an epilogue.  */
5104   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5105     if ((e->flags & EDGE_FAKE) == 0)
5106       break;
5107   if (e == NULL)
5108     goto epilogue_done;
5109 
5110 #ifdef HAVE_return
5111   if (optimize && HAVE_return)
5112     {
5113       /* If we're allowed to generate a simple return instruction,
5114 	 then by definition we don't need a full epilogue.  Examine
5115 	 the block that falls through to EXIT.   If it does not
5116 	 contain any code, examine its predecessors and try to
5117 	 emit (conditional) return instructions.  */
5118 
5119       basic_block last;
5120       rtx label;
5121 
5122       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5123 	if (e->flags & EDGE_FALLTHRU)
5124 	  break;
5125       if (e == NULL)
5126 	goto epilogue_done;
5127       last = e->src;
5128 
5129       /* Verify that there are no active instructions in the last block.  */
5130       label = BB_END (last);
5131       while (label && !LABEL_P (label))
5132 	{
5133 	  if (active_insn_p (label))
5134 	    break;
5135 	  label = PREV_INSN (label);
5136 	}
5137 
5138       if (BB_HEAD (last) == label && LABEL_P (label))
5139 	{
5140 	  edge_iterator ei2;
5141 	  rtx epilogue_line_note = NULL_RTX;
5142 
5143 	  /* Locate the line number associated with the closing brace,
5144 	     if we can find one.  */
5145 	  for (seq = get_last_insn ();
5146 	       seq && ! active_insn_p (seq);
5147 	       seq = PREV_INSN (seq))
5148 	    if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5149 	      {
5150 		epilogue_line_note = seq;
5151 		break;
5152 	      }
5153 
5154 	  for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5155 	    {
5156 	      basic_block bb = e->src;
5157 	      rtx jump;
5158 
5159 	      if (bb == ENTRY_BLOCK_PTR)
5160 		{
5161 		  ei_next (&ei2);
5162 		  continue;
5163 		}
5164 
5165 	      jump = BB_END (bb);
5166 	      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5167 		{
5168 		  ei_next (&ei2);
5169 		  continue;
5170 		}
5171 
5172 	      /* If we have an unconditional jump, we can replace that
5173 		 with a simple return instruction.  */
5174 	      if (simplejump_p (jump))
5175 		{
5176 		  emit_return_into_block (bb, epilogue_line_note);
5177 		  delete_insn (jump);
5178 		}
5179 
5180 	      /* If we have a conditional jump, we can try to replace
5181 		 that with a conditional return instruction.  */
5182 	      else if (condjump_p (jump))
5183 		{
5184 		  if (! redirect_jump (jump, 0, 0))
5185 		    {
5186 		      ei_next (&ei2);
5187 		      continue;
5188 		    }
5189 
5190 		  /* If this block has only one successor, it both jumps
5191 		     and falls through to the fallthru block, so we can't
5192 		     delete the edge.  */
5193 		  if (single_succ_p (bb))
5194 		    {
5195 		      ei_next (&ei2);
5196 		      continue;
5197 		    }
5198 		}
5199 	      else
5200 		{
5201 		  ei_next (&ei2);
5202 		  continue;
5203 		}
5204 
5205 	      /* Fix up the CFG for the successful change we just made.  */
5206 	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
5207 	    }
5208 
5209 	  /* Emit a return insn for the exit fallthru block.  Whether
5210 	     this is still reachable will be determined later.  */
5211 
5212 	  emit_barrier_after (BB_END (last));
5213 	  emit_return_into_block (last, epilogue_line_note);
5214 	  epilogue_end = BB_END (last);
5215 	  single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5216 	  goto epilogue_done;
5217 	}
5218     }
5219 #endif
5220   /* Find the edge that falls through to EXIT.  Other edges may exist
5221      due to RETURN instructions, but those don't need epilogues.
5222      There really shouldn't be a mixture -- either all should have
5223      been converted or none, however...  */
5224 
5225   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5226     if (e->flags & EDGE_FALLTHRU)
5227       break;
5228   if (e == NULL)
5229     goto epilogue_done;
5230 
5231 #ifdef HAVE_epilogue
5232   if (HAVE_epilogue)
5233     {
5234       start_sequence ();
5235       epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5236 
5237       seq = gen_epilogue ();
5238 
5239 #ifdef INCOMING_RETURN_ADDR_RTX
5240       /* If this function returns with the stack depressed and we can support
5241 	 it, massage the epilogue to actually do that.  */
5242       if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5243 	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5244 	seq = keep_stack_depressed (seq);
5245 #endif
5246 
5247       emit_jump_insn (seq);
5248 
5249       /* Retain a map of the epilogue insns.  */
5250       record_insns (seq, &epilogue);
5251       set_insn_locators (seq, epilogue_locator);
5252 
5253       seq = get_insns ();
5254       end_sequence ();
5255 
5256       insert_insn_on_edge (seq, e);
5257       inserted = 1;
5258     }
5259   else
5260 #endif
5261     {
5262       basic_block cur_bb;
5263 
5264       if (! next_active_insn (BB_END (e->src)))
5265 	goto epilogue_done;
5266       /* We have a fall-through edge to the exit block, the source is not
5267          at the end of the function, and there will be an assembler epilogue
5268          at the end of the function.
5269          We can't use force_nonfallthru here, because that would try to
5270          use return.  Inserting a jump 'by hand' is extremely messy, so
5271 	 we take advantage of cfg_layout_finalize using
5272 	fixup_fallthru_exit_predecessor.  */
5273       cfg_layout_initialize (0);
5274       FOR_EACH_BB (cur_bb)
5275 	if (cur_bb->index >= NUM_FIXED_BLOCKS
5276 	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5277 	  cur_bb->aux = cur_bb->next_bb;
5278       cfg_layout_finalize ();
5279     }
5280 epilogue_done:
5281 
5282   if (inserted)
5283     commit_edge_insertions ();
5284 
5285 #ifdef HAVE_sibcall_epilogue
5286   /* Emit sibling epilogues before any sibling call sites.  */
5287   for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5288     {
5289       basic_block bb = e->src;
5290       rtx insn = BB_END (bb);
5291 
5292       if (!CALL_P (insn)
5293 	  || ! SIBLING_CALL_P (insn))
5294 	{
5295 	  ei_next (&ei);
5296 	  continue;
5297 	}
5298 
5299       start_sequence ();
5300       emit_insn (gen_sibcall_epilogue ());
5301       seq = get_insns ();
5302       end_sequence ();
5303 
5304       /* Retain a map of the epilogue insns.  Used in life analysis to
5305 	 avoid getting rid of sibcall epilogue insns.  Do this before we
5306 	 actually emit the sequence.  */
5307       record_insns (seq, &sibcall_epilogue);
5308       set_insn_locators (seq, epilogue_locator);
5309 
5310       emit_insn_before (seq, insn);
5311       ei_next (&ei);
5312     }
5313 #endif
5314 
5315 #ifdef HAVE_prologue
5316   /* This is probably all useless now that we use locators.  */
5317   if (prologue_end)
5318     {
5319       rtx insn, prev;
5320 
5321       /* GDB handles `break f' by setting a breakpoint on the first
5322 	 line note after the prologue.  This means (1) that if
5323 	 there are line number notes before where we inserted the
5324 	 prologue we should move them, and (2) we should generate a
5325 	 note before the end of the first basic block, if there isn't
5326 	 one already there.
5327 
5328 	 ??? This behavior is completely broken when dealing with
5329 	 multiple entry functions.  We simply always place the note
5330 	 in the first basic block and let alternate entry points
5331 	 be missed.
5332        */
5333 
5334       for (insn = prologue_end; insn; insn = prev)
5335 	{
5336 	  prev = PREV_INSN (insn);
5337 	  if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5338 	    {
5339 	      /* Note that we cannot reorder the first insn in the
5340 		 chain, since rest_of_compilation relies on that
5341 		 remaining constant.  */
5342 	      if (prev == NULL)
5343 		break;
5344 	      reorder_insns (insn, insn, prologue_end);
5345 	    }
5346 	}
5347 
5348       /* Find the last line number note in the first block.  */
5349       for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5350 	   insn != prologue_end && insn;
5351 	   insn = PREV_INSN (insn))
5352 	if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5353 	  break;
5354 
5355       /* If we didn't find one, make a copy of the first line number
5356 	 we run across.  */
5357       if (! insn)
5358 	{
5359 	  for (insn = next_active_insn (prologue_end);
5360 	       insn;
5361 	       insn = PREV_INSN (insn))
5362 	    if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5363 	      {
5364 		emit_note_copy_after (insn, prologue_end);
5365 		break;
5366 	      }
5367 	}
5368     }
5369 #endif
5370 #ifdef HAVE_epilogue
5371   if (epilogue_end)
5372     {
5373       rtx insn, next;
5374 
5375       /* Similarly, move any line notes that appear after the epilogue.
5376          There is no need, however, to be quite so anal about the existence
5377 	 of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
5378 	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5379 	 info generation.  */
5380       for (insn = epilogue_end; insn; insn = next)
5381 	{
5382 	  next = NEXT_INSN (insn);
5383 	  if (NOTE_P (insn)
5384 	      && (NOTE_LINE_NUMBER (insn) > 0
5385 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5386 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5387 	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5388 	}
5389     }
5390 #endif
5391 }
5392 
5393 /* Reposition the prologue-end and epilogue-begin notes after instruction
5394    scheduling and delayed branch scheduling.  */
5395 
5396 void
5397 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5398 {
5399 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5400   rtx insn, last, note;
5401   int len;
5402 
5403   if ((len = VEC_length (int, prologue)) > 0)
5404     {
5405       last = 0, note = 0;
5406 
5407       /* Scan from the beginning until we reach the last prologue insn.
5408 	 We apparently can't depend on basic_block_{head,end} after
5409 	 reorg has run.  */
5410       for (insn = f; insn; insn = NEXT_INSN (insn))
5411 	{
5412 	  if (NOTE_P (insn))
5413 	    {
5414 	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5415 		note = insn;
5416 	    }
5417 	  else if (contains (insn, &prologue))
5418 	    {
5419 	      last = insn;
5420 	      if (--len == 0)
5421 		break;
5422 	    }
5423 	}
5424 
5425       if (last)
5426 	{
5427 	  /* Find the prologue-end note if we haven't already, and
5428 	     move it to just after the last prologue insn.  */
5429 	  if (note == 0)
5430 	    {
5431 	      for (note = last; (note = NEXT_INSN (note));)
5432 		if (NOTE_P (note)
5433 		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5434 		  break;
5435 	    }
5436 
5437 	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
5438 	  if (LABEL_P (last))
5439 	    last = NEXT_INSN (last);
5440 	  reorder_insns (note, note, last);
5441 	}
5442     }
5443 
5444   if ((len = VEC_length (int, epilogue)) > 0)
5445     {
5446       last = 0, note = 0;
5447 
5448       /* Scan from the end until we reach the first epilogue insn.
5449 	 We apparently can't depend on basic_block_{head,end} after
5450 	 reorg has run.  */
5451       for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5452 	{
5453 	  if (NOTE_P (insn))
5454 	    {
5455 	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5456 		note = insn;
5457 	    }
5458 	  else if (contains (insn, &epilogue))
5459 	    {
5460 	      last = insn;
5461 	      if (--len == 0)
5462 		break;
5463 	    }
5464 	}
5465 
5466       if (last)
5467 	{
5468 	  /* Find the epilogue-begin note if we haven't already, and
5469 	     move it to just before the first epilogue insn.  */
5470 	  if (note == 0)
5471 	    {
5472 	      for (note = insn; (note = PREV_INSN (note));)
5473 		if (NOTE_P (note)
5474 		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5475 		  break;
5476 	    }
5477 
5478 	  if (PREV_INSN (last) != note)
5479 	    reorder_insns (note, note, PREV_INSN (last));
5480 	}
5481     }
5482 #endif /* HAVE_prologue or HAVE_epilogue */
5483 }
5484 
5485 /* Resets the ib_boundaries_block array.  */
5486 
5487 void
5488 reset_block_changes (void)
5489 {
5490   cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
5491   VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
5492 }
5493 
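/* ib_boundaries_block is, roughly, a map from INSN_UID to the BLOCK that
   was current when that insn was emitted: record_block_change pads the
   vector with the previous block up to the current maximum UID before
   appending BLOCK, and check_block_change reads an insn's entry back out
   by its UID.  */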
5494 /* Record the boundary for BLOCK.  */
5495 void
5496 record_block_change (tree block)
5497 {
5498   int i, n;
5499   tree last_block;
5500 
5501   if (!block)
5502     return;
5503 
5504   if (!cfun->ib_boundaries_block)
5505     return;
5506 
5507   last_block = VEC_pop (tree, cfun->ib_boundaries_block);
5508   n = get_max_uid ();
5509   for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
5510     VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);
5511 
5512   VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
5513 }
5514 
5515 /* Finishes the recording of block boundaries.  */
5516 void
5517 finalize_block_changes (void)
5518 {
5519   record_block_change (DECL_INITIAL (current_function_decl));
5520 }
5521 
5522 /* For INSN return the BLOCK it belongs to.  */
5523 void
5524 check_block_change (rtx insn, tree *block)
5525 {
5526   unsigned uid = INSN_UID (insn);
5527 
5528   if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
5529     return;
5530 
5531   *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
5532 }
5533 
5534 /* Releases the ib_boundaries_block records.  */
5535 void
5536 free_block_changes (void)
5537 {
5538   VEC_free (tree, gc, cfun->ib_boundaries_block);
5539 }
5540 
5541 /* Returns the name of the current function.  */
5542 const char *
5543 current_function_name (void)
5544 {
5545   return lang_hooks.decl_printable_name (cfun->decl, 2);
5546 }
5547 
5548 
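/* Set current_function_uses_only_leaf_regs when the function is being
   optimized, is a leaf function, and uses only the registers allowed by
   the target's LEAF_REGISTERS definition.  */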
5549 static unsigned int
5550 rest_of_handle_check_leaf_regs (void)
5551 {
5552 #ifdef LEAF_REGISTERS
5553   current_function_uses_only_leaf_regs
5554     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5555 #endif
5556   return 0;
5557 }
5558 
5559 /* Insert a TYPE into the used types hash table of CFUN.  */
5560 static void
5561 used_types_insert_helper (tree type, struct function *func)
5562 {
5563   if (type != NULL && func != NULL)
5564     {
5565       void **slot;
5566 
5567       if (func->used_types_hash == NULL)
5568 	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5569 						 htab_eq_pointer, NULL);
5570       slot = htab_find_slot (func->used_types_hash, type, INSERT);
5571       if (*slot == NULL)
5572 	*slot = type;
5573     }
5574 }
5575 
5576 /* Given a type, insert it into the used types hash table in cfun.  */
5577 void
5578 used_types_insert (tree t)
5579 {
5580   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5581     t = TREE_TYPE (t);
5582   t = TYPE_MAIN_VARIANT (t);
5583   if (debug_info_level > DINFO_LEVEL_NONE)
5584     used_types_insert_helper (t, cfun);
5585 }
5586 
5587 struct tree_opt_pass pass_leaf_regs =
5588 {
5589   NULL,                                 /* name */
5590   NULL,                                 /* gate */
5591   rest_of_handle_check_leaf_regs,       /* execute */
5592   NULL,                                 /* sub */
5593   NULL,                                 /* next */
5594   0,                                    /* static_pass_number */
5595   0,                                    /* tv_id */
5596   0,                                    /* properties_required */
5597   0,                                    /* properties_provided */
5598   0,                                    /* properties_destroyed */
5599   0,                                    /* todo_flags_start */
5600   0,                                    /* todo_flags_finish */
5601   0                                     /* letter */
5602 };
5603 
5604 
5605 #include "gt-function.h"
5606