1 /* Expands front end tree to back end RTL for GCC.
2    Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3    1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA.  */
22 
23 /* This file handles the generation of rtl code from tree structure
24    at the level of the function as a whole.
25    It creates the rtl expressions for parameters and auto variables
26    and has full responsibility for allocating stack slots.
27 
28    `expand_function_start' is called at the beginning of a function,
29    before the function body is parsed, and `expand_function_end' is
30    called after parsing the body.
31 
32    Call `assign_stack_local' to allocate a stack slot for a local variable.
33    This is usually done during the RTL generation for the function body,
34    but it can also be done in the reload pass when a pseudo-register does
35    not get a hard register.  */
36 
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "flags.h"
44 #include "except.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "optabs.h"
48 #include "libfuncs.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "output.h"
54 #include "basic-block.h"
55 #include "toplev.h"
56 #include "hashtab.h"
57 #include "ggc.h"
58 #include "tm_p.h"
59 #include "integrate.h"
60 #include "langhooks.h"
61 #include "target.h"
62 #include "cfglayout.h"
63 #include "tree-gimple.h"
64 #include "tree-pass.h"
65 #include "predict.h"
66 
67 #ifndef LOCAL_ALIGNMENT
68 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
69 #endif
70 
71 #ifndef STACK_ALIGNMENT_NEEDED
72 #define STACK_ALIGNMENT_NEEDED 1
73 #endif
74 
75 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
76 
77 /* Some systems use __main in a way incompatible with its use in gcc; in these
78    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79    give the same symbol without quotes for an alternative entry point.  You
80    must define both, or neither.  */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
84 
85 /* Round a value down to the largest multiple of the required alignment
86    that does not exceed it.  Avoid using division in case the value is
87    negative.  Assume the alignment is a power of two.  */
88 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
89 
90 /* Similar, but round to the next highest integer that meets the
91    alignment.  */
92 #define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
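
/* Worked example (illustrative only): with ALIGN == 8,
   FLOOR_ROUND (-13, 8) == -16 and CEIL_ROUND (13, 8) == 16.
   The masking form behaves predictably even when VALUE is negative,
   where division-based rounding would not.  */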
93 
94 /* Nonzero if function being compiled doesn't contain any calls
95    (ignoring the prologue and epilogue).  This is set prior to
96    local register allocation and is valid for the remaining
97    compiler passes.  */
98 int current_function_is_leaf;
99 
100 /* Nonzero if function being compiled doesn't modify the stack pointer
101    (ignoring the prologue and epilogue).  This is only valid after
102    life_analysis has run.  */
103 int current_function_sp_is_unchanging;
104 
105 /* Nonzero if the function being compiled is a leaf function which only
106    uses leaf registers.  This is valid after reload (specifically after
107    sched2) and is useful only if the port defines LEAF_REGISTERS.  */
108 int current_function_uses_only_leaf_regs;
109 
110 /* Nonzero once virtual register instantiation has been done.
111    assign_stack_local uses frame_pointer_rtx when this is nonzero.
112    calls.c:emit_library_call_value_1 uses it to set up
113    post-instantiation libcalls.  */
114 int virtuals_instantiated;
115 
116 /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
117 static GTY(()) int funcdef_no;
118 
119 /* These variables hold pointers to functions to create and destroy
120    target specific, per-function data structures.  */
121 struct machine_function * (*init_machine_status) (void);
122 
123 /* The currently compiled function.  */
124 struct function *cfun = 0;
125 
126 DEF_VEC_I(int);
127 DEF_VEC_ALLOC_I(int,heap);
128 
129 /* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
130 static VEC(int,heap) *prologue;
131 static VEC(int,heap) *epilogue;
132 
133 /* Array holding the INSN_UID of each sibcall epilogue insn
134    in this function.  */
135 static VEC(int,heap) *sibcall_epilogue;
136 
137 /* In order to evaluate some expressions, such as function calls returning
138    structures in memory, we need to temporarily allocate stack locations.
139    We record each allocated temporary in the following structure.
140 
141    Associated with each temporary slot is a nesting level.  When we pop up
142    one level, all temporaries associated with the previous level are freed.
143    Normally, all temporaries are freed after the execution of the statement
144    in which they were created.  However, if we are inside a ({...}) grouping,
145    the result may be in a temporary and hence must be preserved.  If the
146    result could be in a temporary, we preserve it if we can determine which
147    one it is in.  If we cannot determine which temporary may contain the
148    result, all temporaries are preserved.  A temporary is preserved by
149    pretending it was allocated at the previous nesting level.
150 
151    Automatic variables are also assigned temporary slots, at the nesting
152    level where they are defined.  They are marked as "kept" so that
153    free_temp_slots will not free them.  */
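
/* A typical usage pattern from expansion code looks roughly like this
   (a hypothetical sketch, not a quotation of any particular caller):

     push_temp_slots ();
     target = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
     ... emit RTL computing into TARGET ...
     free_temp_slots ();
     pop_temp_slots ();

   Slots not marked "kept" are recycled by free_temp_slots, and
   pop_temp_slots releases everything allocated at the popped level.  */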
154 
155 struct temp_slot GTY(())
156 {
157   /* Points to next temporary slot.  */
158   struct temp_slot *next;
159   /* Points to previous temporary slot.  */
160   struct temp_slot *prev;
161 
162   /* The rtx used to reference the slot.  */
163   rtx slot;
164   /* The rtx used to represent the address if not the address of the
165      slot above.  May be an EXPR_LIST if multiple addresses exist.  */
166   rtx address;
167   /* The alignment (in bits) of the slot.  */
168   unsigned int align;
169   /* The size, in units, of the slot.  */
170   HOST_WIDE_INT size;
171   /* The type of the object in the slot, or zero if it doesn't correspond
172      to a type.  We use this to determine whether a slot can be reused.
173      It can be reused if objects of the type of the new slot will always
174      conflict with objects of the type of the old slot.  */
175   tree type;
176   /* Nonzero if this temporary is currently in use.  */
177   char in_use;
178   /* Nonzero if this temporary has its address taken.  */
179   char addr_taken;
180   /* Nesting level at which this slot is being used.  */
181   int level;
182   /* Nonzero if this should survive a call to free_temp_slots.  */
183   int keep;
184   /* The offset of the slot from the frame_pointer, including extra space
185      for alignment.  This info is for combine_temp_slots.  */
186   HOST_WIDE_INT base_offset;
187   /* The size of the slot, including extra space for alignment.  This
188      info is for combine_temp_slots.  */
189   HOST_WIDE_INT full_size;
190 };
191 
192 /* Forward declarations.  */
193 
194 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
195 				 struct function *);
196 static struct temp_slot *find_temp_slot_from_address (rtx);
197 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
198 static void pad_below (struct args_size *, enum machine_mode, tree);
199 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
200 static void reorder_fix_fragments (tree);
201 static int all_blocks (tree, tree *);
202 static tree *get_block_vector (tree, int *);
203 extern tree debug_find_var_in_block_tree (tree, tree);
204 /* We always define `record_insns' even if it's not used so that we
205    can always export `prologue_epilogue_contains'.  */
206 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
207 static int contains (rtx, VEC(int,heap) **);
208 #ifdef HAVE_return
209 static void emit_return_into_block (basic_block, rtx);
210 #endif
211 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
212 static rtx keep_stack_depressed (rtx);
213 #endif
214 static void prepare_function_start (tree);
215 static void do_clobber_return_reg (rtx, void *);
216 static void do_use_return_reg (rtx, void *);
217 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
218 
219 /* Pointer to chain of `struct function' for containing functions.  */
220 struct function *outer_function_chain;
221 
222 /* Given a function decl for a containing function,
223    return the `struct function' for it.  */
224 
225 struct function *
226 find_function_data (tree decl)
227 {
228   struct function *p;
229 
230   for (p = outer_function_chain; p; p = p->outer)
231     if (p->decl == decl)
232       return p;
233 
234   gcc_unreachable ();
235 }
236 
237 /* Save the current context for compilation of a nested function.
238    This is called from language-specific code.  The caller should use
239    the enter_nested langhook to save any language-specific state,
240    since this function knows only about language-independent
241    variables.  */
242 
243 void
244 push_function_context_to (tree context ATTRIBUTE_UNUSED)
245 {
246   struct function *p;
247 
248   if (cfun == 0)
249     init_dummy_function_start ();
250   p = cfun;
251 
252   p->outer = outer_function_chain;
253   outer_function_chain = p;
254 
255   lang_hooks.function.enter_nested (p);
256 
257   cfun = 0;
258 }
259 
260 void
261 push_function_context (void)
262 {
263   push_function_context_to (current_function_decl);
264 }
265 
266 /* Restore the last saved context, at the end of a nested function.
267    This function is called from language-specific code.  */
268 
269 void
270 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
271 {
272   struct function *p = outer_function_chain;
273 
274   cfun = p;
275   outer_function_chain = p->outer;
276 
277   current_function_decl = p->decl;
278 
279   lang_hooks.function.leave_nested (p);
280 
281   /* Reset variables that have known state during rtx generation.  */
282   virtuals_instantiated = 0;
283   generating_concat_p = 1;
284 }
285 
286 void
287 pop_function_context (void)
288 {
289   pop_function_context_from (current_function_decl);
290 }
291 
292 /* Clear out all parts of the state in F that can safely be discarded
293    after the function has been parsed, but not compiled, to let
294    garbage collection reclaim the memory.  */
295 
296 void
297 free_after_parsing (struct function *f)
298 {
299   /* f->expr->forced_labels is used by code generation.  */
300   /* f->emit->regno_reg_rtx is used by code generation.  */
301   /* f->varasm is used by code generation.  */
302   /* f->eh->eh_return_stub_label is used by code generation.  */
303 
304   lang_hooks.function.final (f);
305 }
306 
307 /* Clear out all parts of the state in F that can safely be discarded
308    after the function has been compiled, to let garbage collection
309    reclaim the memory.  */
310 
311 void
312 free_after_compilation (struct function *f)
313 {
314   VEC_free (int, heap, prologue);
315   VEC_free (int, heap, epilogue);
316   VEC_free (int, heap, sibcall_epilogue);
317 
318   f->eh = NULL;
319   f->expr = NULL;
320   f->emit = NULL;
321   f->varasm = NULL;
322   f->machine = NULL;
323   f->cfg = NULL;
324 
325   f->x_avail_temp_slots = NULL;
326   f->x_used_temp_slots = NULL;
327   f->arg_offset_rtx = NULL;
328   f->return_rtx = NULL;
329   f->internal_arg_pointer = NULL;
330   f->x_nonlocal_goto_handler_labels = NULL;
331   f->x_return_label = NULL;
332   f->x_naked_return_label = NULL;
333   f->x_stack_slot_list = NULL;
334   f->x_tail_recursion_reentry = NULL;
335   f->x_arg_pointer_save_area = NULL;
336   f->x_parm_birth_insn = NULL;
337   f->original_arg_vector = NULL;
338   f->original_decl_initial = NULL;
339   f->epilogue_delay_list = NULL;
340 }
341 
342 /* Allocate fixed slots in the stack frame of the current function.  */
343 
344 /* Return size needed for stack frame based on slots so far allocated in
345    function F.
346    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
347    the caller may have to do that.  */
348 
349 static HOST_WIDE_INT
350 get_func_frame_size (struct function *f)
351 {
352   if (FRAME_GROWS_DOWNWARD)
353     return -f->x_frame_offset;
354   else
355     return f->x_frame_offset;
356 }
357 
358 /* Return size needed for stack frame based on slots so far allocated.
359    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
360    the caller may have to do that.  */
361 HOST_WIDE_INT
362 get_frame_size (void)
363 {
364   return get_func_frame_size (cfun);
365 }
366 
367 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
368    with machine mode MODE.
369 
370    ALIGN controls the amount of alignment for the address of the slot:
371    0 means according to MODE,
372    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
373    -2 means use BITS_PER_UNIT,
374    positive specifies alignment boundary in bits.
375 
376    We do not round to stack_boundary here.
377 
378    FUNCTION specifies the function to allocate in.  */
379 
380 static rtx
381 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
382 		      struct function *function)
383 {
384   rtx x, addr;
385   int bigend_correction = 0;
386   unsigned int alignment;
387   int frame_off, frame_alignment, frame_phase;
388 
389   if (align == 0)
390     {
391       tree type;
392 
393       if (mode == BLKmode)
394 	alignment = BIGGEST_ALIGNMENT;
395       else
396 	alignment = GET_MODE_ALIGNMENT (mode);
397 
398       /* Allow the target to (possibly) increase the alignment of this
399 	 stack slot.  */
400       type = lang_hooks.types.type_for_mode (mode, 0);
401       if (type)
402 	alignment = LOCAL_ALIGNMENT (type, alignment);
403 
404       alignment /= BITS_PER_UNIT;
405     }
406   else if (align == -1)
407     {
408       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
409       size = CEIL_ROUND (size, alignment);
410     }
411   else if (align == -2)
412     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
413   else
414     alignment = align / BITS_PER_UNIT;
415 
416   if (FRAME_GROWS_DOWNWARD)
417     function->x_frame_offset -= size;
418 
419   /* Ignore alignment requests we cannot satisfy given the preferred stack boundary.  */
420   if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
421     alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
422 
423   if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
424     function->stack_alignment_needed = alignment * BITS_PER_UNIT;
425 
426   /* Calculate how many bytes the start of local variables is off from
427      stack alignment.  */
428   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
429   frame_off = STARTING_FRAME_OFFSET % frame_alignment;
430   frame_phase = frame_off ? frame_alignment - frame_off : 0;
431 
432   /* Round the frame offset to the specified alignment.  The default is
433      to always honor requests to align the stack but a port may choose to
434      do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
435   if (STACK_ALIGNMENT_NEEDED
436       || mode != BLKmode
437       || size != 0)
438     {
439       /*  We must be careful here, since FRAME_OFFSET might be negative and
440 	  division with a negative dividend isn't as well defined as we might
441 	  like.  So we instead assume that ALIGNMENT is a power of two and
442 	  use logical operations which are unambiguous.  */
443       if (FRAME_GROWS_DOWNWARD)
444 	function->x_frame_offset
445 	  = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
446 			  (unsigned HOST_WIDE_INT) alignment)
447 	     + frame_phase);
448       else
449 	function->x_frame_offset
450 	  = (CEIL_ROUND (function->x_frame_offset - frame_phase,
451 			 (unsigned HOST_WIDE_INT) alignment)
452 	     + frame_phase);
453     }
454 
455   /* On a big-endian machine, if we are allocating more space than we will use,
456      use the least significant bytes of those that are allocated.  */
457   if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
458     bigend_correction = size - GET_MODE_SIZE (mode);
459 
460   /* If we have already instantiated virtual registers, return the actual
461      address relative to the frame pointer.  */
462   if (function == cfun && virtuals_instantiated)
463     addr = plus_constant (frame_pointer_rtx,
464 			  trunc_int_for_mode
465 			  (frame_offset + bigend_correction
466 			   + STARTING_FRAME_OFFSET, Pmode));
467   else
468     addr = plus_constant (virtual_stack_vars_rtx,
469 			  trunc_int_for_mode
470 			  (function->x_frame_offset + bigend_correction,
471 			   Pmode));
472 
473   if (!FRAME_GROWS_DOWNWARD)
474     function->x_frame_offset += size;
475 
476   x = gen_rtx_MEM (mode, addr);
477   MEM_NOTRAP_P (x) = 1;
478 
479   function->x_stack_slot_list
480     = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
481 
482   /* Try to detect frame size overflows on native platforms.  */
483 #if BITS_PER_WORD >= 32
484   if ((FRAME_GROWS_DOWNWARD
485        ? (unsigned HOST_WIDE_INT) -function->x_frame_offset
486        : (unsigned HOST_WIDE_INT) function->x_frame_offset)
487 	> ((unsigned HOST_WIDE_INT) 1 << (BITS_PER_WORD - 1))
488 	    /* Leave room for the fixed part of the frame.  */
489 	    - 64 * UNITS_PER_WORD)
490     {
491       error ("%Jtotal size of local objects too large", function->decl);
492       /* Avoid duplicate error messages as much as possible.  */
493       function->x_frame_offset = 0;
494     }
495 #endif
496 
497   return x;
498 }
499 
500 /* Wrapper around assign_stack_local_1;  assign a local stack slot for the
501    current function.  */
502 
503 rtx
504 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
505 {
506   return assign_stack_local_1 (mode, size, align, cfun);
507 }
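
/* For instance (illustrative only), assign_stack_local (SImode, 4, 0)
   reserves a 4-byte slot aligned according to SImode, while
   assign_stack_local (BLKmode, len, -1) rounds LEN up to a multiple of
   BIGGEST_ALIGNMENT and aligns the slot to that boundary.  */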
508 
509 
510 /* Removes temporary slot TEMP from LIST.  */
511 
512 static void
513 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
514 {
515   if (temp->next)
516     temp->next->prev = temp->prev;
517   if (temp->prev)
518     temp->prev->next = temp->next;
519   else
520     *list = temp->next;
521 
522   temp->prev = temp->next = NULL;
523 }
524 
525 /* Inserts temporary slot TEMP to LIST.  */
526 
527 static void
528 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
529 {
530   temp->next = *list;
531   if (*list)
532     (*list)->prev = temp;
533   temp->prev = NULL;
534   *list = temp;
535 }
536 
537 /* Returns the list of used temp slots at LEVEL.  */
538 
539 static struct temp_slot **
540 temp_slots_at_level (int level)
541 {
542 
543   if (!used_temp_slots)
544     VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
545 
546   while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
547     VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
548 
549   return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
550 }
551 
552 /* Returns the maximal temporary slot level.  */
553 
554 static int
555 max_slot_level (void)
556 {
557   if (!used_temp_slots)
558     return -1;
559 
560   return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
561 }
562 
563 /* Moves temporary slot TEMP to LEVEL.  */
564 
565 static void
566 move_slot_to_level (struct temp_slot *temp, int level)
567 {
568   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
569   insert_slot_to_list (temp, temp_slots_at_level (level));
570   temp->level = level;
571 }
572 
573 /* Make temporary slot TEMP available.  */
574 
575 static void
576 make_slot_available (struct temp_slot *temp)
577 {
578   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
579   insert_slot_to_list (temp, &avail_temp_slots);
580   temp->in_use = 0;
581   temp->level = -1;
582 }
583 
584 /* Allocate a temporary stack slot and record it for possible later
585    reuse.
586 
587    MODE is the machine mode to be given to the returned rtx.
588 
589    SIZE is the size in units of the space required.  We do no rounding here
590    since assign_stack_local will do any required rounding.
591 
592    KEEP is 1 if this slot is to be retained after a call to
593    free_temp_slots.  Automatic variables for a block are allocated
594    with this flag.  KEEP values of 2 or 3 were needed respectively
595    for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
596    or for SAVE_EXPRs, but they are now unused.
597 
598    TYPE is the type that will be used for the stack slot.  */
599 
600 rtx
601 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
602 			    int keep, tree type)
603 {
604   unsigned int align;
605   struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
606   rtx slot;
607 
608   /* If SIZE is -1 it means that somebody tried to allocate a temporary
609      of a variable size.  */
610   gcc_assert (size != -1);
611 
612   /* These are now unused.  */
613   gcc_assert (keep <= 1);
614 
615   if (mode == BLKmode)
616     align = BIGGEST_ALIGNMENT;
617   else
618     align = GET_MODE_ALIGNMENT (mode);
619 
620   if (! type)
621     type = lang_hooks.types.type_for_mode (mode, 0);
622 
623   if (type)
624     align = LOCAL_ALIGNMENT (type, align);
625 
626   /* Try to find an available, already-allocated temporary of the proper
627      mode which meets the size and alignment requirements.  Choose the
628      smallest one with the closest alignment.  */
629   for (p = avail_temp_slots; p; p = p->next)
630     {
631       if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
632 	  && objects_must_conflict_p (p->type, type)
633 	  && (best_p == 0 || best_p->size > p->size
634 	      || (best_p->size == p->size && best_p->align > p->align)))
635 	{
636 	  if (p->align == align && p->size == size)
637 	    {
638 	      selected = p;
639 	      cut_slot_from_list (selected, &avail_temp_slots);
640 	      best_p = 0;
641 	      break;
642 	    }
643 	  best_p = p;
644 	}
645     }
646 
647   /* Make our best, if any, the one to use.  */
648   if (best_p)
649     {
650       selected = best_p;
651       cut_slot_from_list (selected, &avail_temp_slots);
652 
653       /* If there are enough aligned bytes left over, make them into a new
654 	 temp_slot so that the extra bytes don't get wasted.  Do this only
655 	 for BLKmode slots, so that we can be sure of the alignment.  */
656       if (GET_MODE (best_p->slot) == BLKmode)
657 	{
658 	  int alignment = best_p->align / BITS_PER_UNIT;
659 	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
660 
661 	  if (best_p->size - rounded_size >= alignment)
662 	    {
663 	      p = ggc_alloc (sizeof (struct temp_slot));
664 	      p->in_use = p->addr_taken = 0;
665 	      p->size = best_p->size - rounded_size;
666 	      p->base_offset = best_p->base_offset + rounded_size;
667 	      p->full_size = best_p->full_size - rounded_size;
668 	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
669 	      p->align = best_p->align;
670 	      p->address = 0;
671 	      p->type = best_p->type;
672 	      insert_slot_to_list (p, &avail_temp_slots);
673 
674 	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
675 						   stack_slot_list);
676 
677 	      best_p->size = rounded_size;
678 	      best_p->full_size = rounded_size;
679 	    }
680 	}
681     }
682 
683   /* If we still didn't find one, make a new temporary.  */
684   if (selected == 0)
685     {
686       HOST_WIDE_INT frame_offset_old = frame_offset;
687 
688       p = ggc_alloc (sizeof (struct temp_slot));
689 
690       /* We are passing an explicit alignment request to assign_stack_local.
691 	 One side effect of that is assign_stack_local will not round SIZE
692 	 to ensure the frame offset remains suitably aligned.
693 
694 	 So for requests which depended on the rounding of SIZE, we go ahead
695 	 and round it now.  We also make sure ALIGNMENT is at least
696 	 BIGGEST_ALIGNMENT.  */
697       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
698       p->slot = assign_stack_local (mode,
699 				    (mode == BLKmode
700 				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
701 				     : size),
702 				    align);
703 
704       p->align = align;
705 
706       /* The following slot size computation is necessary because we don't
707 	 know the actual size of the temporary slot until assign_stack_local
708 	 has performed all the frame alignment and size rounding for the
709 	 requested temporary.  Note that extra space added for alignment
710 	 can be either above or below this stack slot depending on which
711 	 way the frame grows.  We include the extra space if and only if it
712 	 is above this slot.  */
713       if (FRAME_GROWS_DOWNWARD)
714 	p->size = frame_offset_old - frame_offset;
715       else
716 	p->size = size;
717 
718       /* Now define the fields used by combine_temp_slots.  */
719       if (FRAME_GROWS_DOWNWARD)
720 	{
721 	  p->base_offset = frame_offset;
722 	  p->full_size = frame_offset_old - frame_offset;
723 	}
724       else
725 	{
726 	  p->base_offset = frame_offset_old;
727 	  p->full_size = frame_offset - frame_offset_old;
728 	}
729       p->address = 0;
730 
731       selected = p;
732     }
733 
734   p = selected;
735   p->in_use = 1;
736   p->addr_taken = 0;
737   p->type = type;
738   p->level = temp_slot_level;
739   p->keep = keep;
740 
741   pp = temp_slots_at_level (p->level);
742   insert_slot_to_list (p, pp);
743 
744   /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
745   slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
746   stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
747 
748   /* If we know the alias set for the memory that will be used, use
749      it.  If there's no TYPE, then we don't know anything about the
750      alias set for the memory.  */
751   set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
752   set_mem_align (slot, align);
753 
754   /* If a type is specified, set the relevant flags.  */
755   if (type != 0)
756     {
757       MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
758       MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
759     }
760   MEM_NOTRAP_P (slot) = 1;
761 
762   return slot;
763 }
764 
765 /* Allocate a temporary stack slot and record it for possible later
766    reuse.  First three arguments are same as in preceding function.  */
767 
768 rtx
769 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
770 {
771   return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
772 }
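
/* For example (a hypothetical sketch), code that needs scratch memory
   for a DFmode value during expansion might do:

     rtx tmp = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);
     emit_move_insn (tmp, src);

   and rely on free_temp_slots to recycle TMP at the end of the
   statement, since KEEP is 0.  */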
773 
774 /* Assign a temporary.
775    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
776    and so that should be used in error messages.  In either case, we
777    allocate space of the given type.
778    KEEP is as for assign_stack_temp.
779    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
780    it is 0 if a register is OK.
781    DONT_PROMOTE is 1 if we should not promote values in register
782    to wider modes.  */
783 
784 rtx
785 assign_temp (tree type_or_decl, int keep, int memory_required,
786 	     int dont_promote ATTRIBUTE_UNUSED)
787 {
788   tree type, decl;
789   enum machine_mode mode;
790 #ifdef PROMOTE_MODE
791   int unsignedp;
792 #endif
793 
794   if (DECL_P (type_or_decl))
795     decl = type_or_decl, type = TREE_TYPE (decl);
796   else
797     decl = NULL, type = type_or_decl;
798 
799   mode = TYPE_MODE (type);
800 #ifdef PROMOTE_MODE
801   unsignedp = TYPE_UNSIGNED (type);
802 #endif
803 
804   if (mode == BLKmode || memory_required)
805     {
806       HOST_WIDE_INT size = int_size_in_bytes (type);
807       tree size_tree;
808       rtx tmp;
809 
810       /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
811 	 problems with allocating the stack space.  */
812       if (size == 0)
813 	size = 1;
814 
815       /* Unfortunately, we don't yet know how to allocate variable-sized
816 	 temporaries.  However, sometimes we have a fixed upper limit on
817 	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
818 	 instead.  This is the case for Chill variable-sized strings.  */
819       if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
820 	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
821 	  && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
822 	size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
823 
824       /* If we still haven't been able to get a size, see if the language
825 	 can compute a maximum size.  */
826       if (size == -1
827 	  && (size_tree = lang_hooks.types.max_size (type)) != 0
828 	  && host_integerp (size_tree, 1))
829 	size = tree_low_cst (size_tree, 1);
830 
831       /* The size of the temporary may be too large to fit into an integer.  */
832       /* ??? Not sure this should happen except for user silliness, so limit
833 	 this to things that aren't compiler-generated temporaries.  The
834 	 rest of the time we'll die in assign_stack_temp_for_type.  */
835       if (decl && size == -1
836 	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
837 	{
838 	  error ("size of variable %q+D is too large", decl);
839 	  size = 1;
840 	}
841 
842       tmp = assign_stack_temp_for_type (mode, size, keep, type);
843       return tmp;
844     }
845 
846 #ifdef PROMOTE_MODE
847   if (! dont_promote)
848     mode = promote_mode (type, mode, &unsignedp, 0);
849 #endif
850 
851   return gen_reg_rtx (mode);
852 }
853 
854 /* Combine temporary stack slots which are adjacent on the stack.
855 
856    This allows for better use of already allocated stack space.  This is only
857    done for BLKmode slots because we can be sure that we won't have alignment
858    problems in this case.  */
859 
860 static void
861 combine_temp_slots (void)
862 {
863   struct temp_slot *p, *q, *next, *next_q;
864   int num_slots;
865 
866   /* We can't combine slots, because the information about which slot
867      is in which alias set will be lost.  */
868   if (flag_strict_aliasing)
869     return;
870 
871   /* If there are a lot of temp slots, don't do anything unless
872      expensive optimizations are enabled.  */
873   if (! flag_expensive_optimizations)
874     for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
875       if (num_slots > 100 || (num_slots > 10 && optimize == 0))
876 	return;
877 
878   for (p = avail_temp_slots; p; p = next)
879     {
880       int delete_p = 0;
881 
882       next = p->next;
883 
884       if (GET_MODE (p->slot) != BLKmode)
885 	continue;
886 
887       for (q = p->next; q; q = next_q)
888 	{
889        	  int delete_q = 0;
890 
891 	  next_q = q->next;
892 
893 	  if (GET_MODE (q->slot) != BLKmode)
894 	    continue;
895 
896 	  if (p->base_offset + p->full_size == q->base_offset)
897 	    {
898 	      /* Q comes after P; combine Q into P.  */
899 	      p->size += q->size;
900 	      p->full_size += q->full_size;
901 	      delete_q = 1;
902 	    }
903 	  else if (q->base_offset + q->full_size == p->base_offset)
904 	    {
905 	      /* P comes after Q; combine P into Q.  */
906 	      q->size += p->size;
907 	      q->full_size += p->full_size;
908 	      delete_p = 1;
909 	      break;
910 	    }
911 	  if (delete_q)
912 	    cut_slot_from_list (q, &avail_temp_slots);
913 	}
914 
915       /* Either delete P or advance past it.  */
916       if (delete_p)
917 	cut_slot_from_list (p, &avail_temp_slots);
918     }
919 }
920 
921 /* Find the temp slot corresponding to the object at address X.  */
922 
923 static struct temp_slot *
924 find_temp_slot_from_address (rtx x)
925 {
926   struct temp_slot *p;
927   rtx next;
928   int i;
929 
930   for (i = max_slot_level (); i >= 0; i--)
931     for (p = *temp_slots_at_level (i); p; p = p->next)
932       {
933 	if (XEXP (p->slot, 0) == x
934 	    || p->address == x
935 	    || (GET_CODE (x) == PLUS
936 		&& XEXP (x, 0) == virtual_stack_vars_rtx
937 		&& GET_CODE (XEXP (x, 1)) == CONST_INT
938 		&& INTVAL (XEXP (x, 1)) >= p->base_offset
939 		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
940 	  return p;
941 
942 	else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
943 	  for (next = p->address; next; next = XEXP (next, 1))
944 	    if (XEXP (next, 0) == x)
945 	      return p;
946       }
947 
948   /* If we have a sum involving a register, see if it points to a temp
949      slot.  */
950   if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
951       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
952     return p;
953   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
954 	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
955     return p;
956 
957   return 0;
958 }
959 
960 /* Indicate that NEW is an alternate way of referring to the temp slot
961    that previously was known by OLD.  */
962 
963 void
964 update_temp_slot_address (rtx old, rtx new)
965 {
966   struct temp_slot *p;
967 
968   if (rtx_equal_p (old, new))
969     return;
970 
971   p = find_temp_slot_from_address (old);
972 
973   /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
974      is a register, see if one operand of the PLUS is a temporary
975      location.  If so, NEW points into it.  Otherwise, if both OLD and
976      NEW are a PLUS and there is a register in common between them,
977      try a recursive call on those values.  */
978   if (p == 0)
979     {
980       if (GET_CODE (old) != PLUS)
981 	return;
982 
983       if (REG_P (new))
984 	{
985 	  update_temp_slot_address (XEXP (old, 0), new);
986 	  update_temp_slot_address (XEXP (old, 1), new);
987 	  return;
988 	}
989       else if (GET_CODE (new) != PLUS)
990 	return;
991 
992       if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
993 	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
994       else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
995 	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
996       else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
997 	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
998       else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
999 	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1000 
1001       return;
1002     }
1003 
1004   /* Otherwise add an alias for the temp's address.  */
1005   else if (p->address == 0)
1006     p->address = new;
1007   else
1008     {
1009       if (GET_CODE (p->address) != EXPR_LIST)
1010 	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1011 
1012       p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1013     }
1014 }
1015 
1016 /* If X could be a reference to a temporary slot, mark the fact that its
1017    address was taken.  */
1018 
1019 void
1020 mark_temp_addr_taken (rtx x)
1021 {
1022   struct temp_slot *p;
1023 
1024   if (x == 0)
1025     return;
1026 
1027   /* If X is not in memory or is at a constant address, it cannot be in
1028      a temporary slot.  */
1029   if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1030     return;
1031 
1032   p = find_temp_slot_from_address (XEXP (x, 0));
1033   if (p != 0)
1034     p->addr_taken = 1;
1035 }
1036 
1037 /* If X could be a reference to a temporary slot, mark that slot as
1038    belonging to the level one higher than the current level.  If X
1039    matched one of our slots, just mark that one.  Otherwise, we can't
1040    easily predict which it is, so upgrade all of them.  Kept slots
1041    need not be touched.
1042 
1043    This is called when an ({...}) construct occurs and a statement
1044    returns a value in memory.  */
1045 
1046 void
1047 preserve_temp_slots (rtx x)
1048 {
1049   struct temp_slot *p = 0, *next;
1050 
1051   /* If there is no result, we still might have some objects whose address
1052      were taken, so we need to make sure they stay around.  */
1053   if (x == 0)
1054     {
1055       for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1056 	{
1057 	  next = p->next;
1058 
1059 	  if (p->addr_taken)
1060 	    move_slot_to_level (p, temp_slot_level - 1);
1061 	}
1062 
1063       return;
1064     }
1065 
1066   /* If X is a register that is being used as a pointer, see if we have
1067      a temporary slot we know it points to.  To be consistent with
1068      the code below, we really should preserve all non-kept slots
1069      if we can't find a match, but that seems to be much too costly.  */
1070   if (REG_P (x) && REG_POINTER (x))
1071     p = find_temp_slot_from_address (x);
1072 
1073   /* If X is not in memory or is at a constant address, it cannot be in
1074      a temporary slot, but it can contain something whose address was
1075      taken.  */
1076   if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1077     {
1078       for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1079 	{
1080 	  next = p->next;
1081 
1082 	  if (p->addr_taken)
1083 	    move_slot_to_level (p, temp_slot_level - 1);
1084 	}
1085 
1086       return;
1087     }
1088 
1089   /* First see if we can find a match.  */
1090   if (p == 0)
1091     p = find_temp_slot_from_address (XEXP (x, 0));
1092 
1093   if (p != 0)
1094     {
1095       /* Move everything at our level whose address was taken to our new
1096 	 level in case we used its address.  */
1097       struct temp_slot *q;
1098 
1099       if (p->level == temp_slot_level)
1100 	{
1101 	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1102 	    {
1103 	      next = q->next;
1104 
1105 	      if (p != q && q->addr_taken)
1106 		move_slot_to_level (q, temp_slot_level - 1);
1107 	    }
1108 
1109 	  move_slot_to_level (p, temp_slot_level - 1);
1110 	  p->addr_taken = 0;
1111 	}
1112       return;
1113     }
1114 
1115   /* Otherwise, preserve all non-kept slots at this level.  */
1116   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1117     {
1118       next = p->next;
1119 
1120       if (!p->keep)
1121 	move_slot_to_level (p, temp_slot_level - 1);
1122     }
1123 }
1124 
1125 /* Free all temporaries used so far.  This is normally called at the
1126    end of generating code for a statement.  */
1127 
1128 void
1129 free_temp_slots (void)
1130 {
1131   struct temp_slot *p, *next;
1132 
1133   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1134     {
1135       next = p->next;
1136 
1137       if (!p->keep)
1138 	make_slot_available (p);
1139     }
1140 
1141   combine_temp_slots ();
1142 }
1143 
1144 /* Push deeper into the nesting level for stack temporaries.  */
1145 
1146 void
1147 push_temp_slots (void)
1148 {
1149   temp_slot_level++;
1150 }
1151 
1152 /* Pop a temporary nesting level.  All slots in use in the current level
1153    are freed.  */
1154 
1155 void
1156 pop_temp_slots (void)
1157 {
1158   struct temp_slot *p, *next;
1159 
1160   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1161     {
1162       next = p->next;
1163       make_slot_available (p);
1164     }
1165 
1166   combine_temp_slots ();
1167 
1168   temp_slot_level--;
1169 }
1170 
1171 /* Initialize temporary slots.  */
1172 
1173 void
1174 init_temp_slots (void)
1175 {
1176   /* We have not allocated any temporaries yet.  */
1177   avail_temp_slots = 0;
1178   used_temp_slots = 0;
1179   temp_slot_level = 0;
1180 }
1181 
1182 /* These routines are responsible for converting virtual register references
1183    to the actual hard register references once RTL generation is complete.
1184 
1185    The following four variables are used for communication between the
1186    routines.  They contain the offsets of the virtual registers from their
1187    respective hard registers.  */
1188 
1189 static int in_arg_offset;
1190 static int var_offset;
1191 static int dynamic_offset;
1192 static int out_arg_offset;
1193 static int cfa_offset;
1194 
1195 /* In most machines, the stack pointer register is equivalent to the bottom
1196    of the stack.  */
1197 
1198 #ifndef STACK_POINTER_OFFSET
1199 #define STACK_POINTER_OFFSET	0
1200 #endif
1201 
1202 /* If not defined, pick an appropriate default for the offset of dynamically
1203    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1204    REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
1205 
1206 #ifndef STACK_DYNAMIC_OFFSET
1207 
1208 /* The bottom of the stack points to the actual arguments.  If
1209    REG_PARM_STACK_SPACE is defined, this includes the space for the register
1210    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1211    stack space for register parameters is not pushed by the caller, but
1212    rather part of the fixed stack areas and hence not included in
1213    `current_function_outgoing_args_size'.  Nevertheless, we must allow
1214    for it when allocating stack dynamic objects.  */
1215 
1216 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1217 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1218 ((ACCUMULATE_OUTGOING_ARGS						      \
1219   ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1220  + (STACK_POINTER_OFFSET))						      \
1221 
1222 #else
1223 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1224 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
1225  + (STACK_POINTER_OFFSET))
1226 #endif
1227 #endif
1228 
1229 
1230 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1231    is a virtual register, return the equivalent hard register and set the
1232    offset indirectly through the pointer.  Otherwise, return 0.  */
1233 
1234 static rtx
1235 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1236 {
1237   rtx new;
1238   HOST_WIDE_INT offset;
1239 
1240   if (x == virtual_incoming_args_rtx)
1241     new = arg_pointer_rtx, offset = in_arg_offset;
1242   else if (x == virtual_stack_vars_rtx)
1243     new = frame_pointer_rtx, offset = var_offset;
1244   else if (x == virtual_stack_dynamic_rtx)
1245     new = stack_pointer_rtx, offset = dynamic_offset;
1246   else if (x == virtual_outgoing_args_rtx)
1247     new = stack_pointer_rtx, offset = out_arg_offset;
1248   else if (x == virtual_cfa_rtx)
1249     {
1250 #ifdef FRAME_POINTER_CFA_OFFSET
1251       new = frame_pointer_rtx;
1252 #else
1253       new = arg_pointer_rtx;
1254 #endif
1255       offset = cfa_offset;
1256     }
1257   else
1258     return NULL_RTX;
1259 
1260   *poffset = offset;
1261   return new;
1262 }
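
/* For instance, a reference to (plus virtual_stack_vars_rtx (const_int 8))
   ends up rewritten as (plus frame_pointer_rtx (const_int N)), where N is
   var_offset + 8 and var_offset is STARTING_FRAME_OFFSET for the current
   function.  */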
1263 
1264 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1265    Instantiate any virtual registers present inside of *LOC.  The expression
1266    is simplified, as much as possible, but is not to be considered "valid"
1267    in any sense implied by the target.  If any change is made, set CHANGED
1268    to true.  */
1269 
1270 static int
1271 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1272 {
1273   HOST_WIDE_INT offset;
1274   bool *changed = (bool *) data;
1275   rtx x, new;
1276 
1277   x = *loc;
1278   if (x == 0)
1279     return 0;
1280 
1281   switch (GET_CODE (x))
1282     {
1283     case REG:
1284       new = instantiate_new_reg (x, &offset);
1285       if (new)
1286 	{
1287 	  *loc = plus_constant (new, offset);
1288 	  if (changed)
1289 	    *changed = true;
1290 	}
1291       return -1;
1292 
1293     case PLUS:
1294       new = instantiate_new_reg (XEXP (x, 0), &offset);
1295       if (new)
1296 	{
1297 	  new = plus_constant (new, offset);
1298 	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1299 	  if (changed)
1300 	    *changed = true;
1301 	  return -1;
1302 	}
1303 
1304       /* FIXME -- from old code */
1305 	  /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1306 	     we can commute the PLUS and SUBREG because pointers into the
1307 	     frame are well-behaved.  */
1308       break;
1309 
1310     default:
1311       break;
1312     }
1313 
1314   return 0;
1315 }
1316 
1317 /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
1318    matches the predicate for insn CODE operand OPERAND.  */
1319 
1320 static int
1321 safe_insn_predicate (int code, int operand, rtx x)
1322 {
1323   const struct insn_operand_data *op_data;
1324 
1325   if (code < 0)
1326     return true;
1327 
1328   op_data = &insn_data[code].operand[operand];
1329   if (op_data->predicate == NULL)
1330     return true;
1331 
1332   return op_data->predicate (x, op_data->mode);
1333 }
1334 
1335 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1336    registers present inside of insn.  The result will be a valid insn.  */
1337 
1338 static void
1339 instantiate_virtual_regs_in_insn (rtx insn)
1340 {
1341   HOST_WIDE_INT offset;
1342   int insn_code, i;
1343   bool any_change = false;
1344   rtx set, new, x, seq;
1345 
1346   /* There are some special cases to be handled first.  */
1347   set = single_set (insn);
1348   if (set)
1349     {
1350       /* We're allowed to assign to a virtual register.  This is interpreted
1351 	 to mean that the underlying register gets assigned the inverse
1352 	 transformation.  This is used, for example, in the handling of
1353 	 non-local gotos.  */
1354       new = instantiate_new_reg (SET_DEST (set), &offset);
1355       if (new)
1356 	{
1357 	  start_sequence ();
1358 
1359 	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1360 	  x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1361 				   GEN_INT (-offset));
1362 	  x = force_operand (x, new);
1363 	  if (x != new)
1364 	    emit_move_insn (new, x);
1365 
1366 	  seq = get_insns ();
1367 	  end_sequence ();
1368 
1369 	  emit_insn_before (seq, insn);
1370 	  delete_insn (insn);
1371 	  return;
1372 	}
1373 
1374       /* Handle a straight copy from a virtual register by generating a
1375 	 new add insn.  The difference between this and falling through
1376 	 to the generic case is avoiding a new pseudo and eliminating a
1377 	 move insn in the initial rtl stream.  */
1378       new = instantiate_new_reg (SET_SRC (set), &offset);
1379       if (new && offset != 0
1380 	  && REG_P (SET_DEST (set))
1381 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1382 	{
1383 	  start_sequence ();
1384 
1385 	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1386 				   new, GEN_INT (offset), SET_DEST (set),
1387 				   1, OPTAB_LIB_WIDEN);
1388 	  if (x != SET_DEST (set))
1389 	    emit_move_insn (SET_DEST (set), x);
1390 
1391 	  seq = get_insns ();
1392 	  end_sequence ();
1393 
1394 	  emit_insn_before (seq, insn);
1395 	  delete_insn (insn);
1396 	  return;
1397 	}
1398 
1399       extract_insn (insn);
1400       insn_code = INSN_CODE (insn);
1401 
1402       /* Handle a plus involving a virtual register by determining if the
1403 	 operands remain valid if they're modified in place.  */
1404       if (GET_CODE (SET_SRC (set)) == PLUS
1405 	  && recog_data.n_operands >= 3
1406 	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1407 	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1408 	  && GET_CODE (recog_data.operand[2]) == CONST_INT
1409 	  && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1410 	{
1411 	  offset += INTVAL (recog_data.operand[2]);
1412 
1413 	  /* If the sum is zero, then replace with a plain move.  */
1414 	  if (offset == 0
1415 	      && REG_P (SET_DEST (set))
1416 	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1417 	    {
1418 	      start_sequence ();
1419 	      emit_move_insn (SET_DEST (set), new);
1420 	      seq = get_insns ();
1421 	      end_sequence ();
1422 
1423 	      emit_insn_before (seq, insn);
1424 	      delete_insn (insn);
1425 	      return;
1426 	    }
1427 
1428 	  x = gen_int_mode (offset, recog_data.operand_mode[2]);
1429 
1430 	  /* Using validate_change and apply_change_group here leaves
1431 	     recog_data in an invalid state.  Since we know exactly what
1432 	     we want to check, do those two by hand.  */
1433 	  if (safe_insn_predicate (insn_code, 1, new)
1434 	      && safe_insn_predicate (insn_code, 2, x))
1435 	    {
1436 	      *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1437 	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1438 	      any_change = true;
1439 
1440 	      /* Fall through into the regular operand fixup loop in
1441 		 order to take care of operands other than 1 and 2.  */
1442 	    }
1443 	}
1444     }
1445   else
1446     {
1447       extract_insn (insn);
1448       insn_code = INSN_CODE (insn);
1449     }
1450 
1451   /* In the general case, we expect virtual registers to appear only in
1452      operands, and then only as either bare registers or inside memories.  */
1453   for (i = 0; i < recog_data.n_operands; ++i)
1454     {
1455       x = recog_data.operand[i];
1456       switch (GET_CODE (x))
1457 	{
1458 	case MEM:
1459 	  {
1460 	    rtx addr = XEXP (x, 0);
1461 	    bool changed = false;
1462 
1463 	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1464 	    if (!changed)
1465 	      continue;
1466 
1467 	    start_sequence ();
1468 	    x = replace_equiv_address (x, addr);
1469 	    seq = get_insns ();
1470 	    end_sequence ();
1471 	    if (seq)
1472 	      emit_insn_before (seq, insn);
1473 	  }
1474 	  break;
1475 
1476 	case REG:
1477 	  new = instantiate_new_reg (x, &offset);
1478 	  if (new == NULL)
1479 	    continue;
1480 	  if (offset == 0)
1481 	    x = new;
1482 	  else
1483 	    {
1484 	      start_sequence ();
1485 
1486 	      /* Careful, special mode predicates may have stuff in
1487 		 insn_data[insn_code].operand[i].mode that isn't useful
1488 		 to us for computing a new value.  */
1489 	      /* ??? Recognize address_operand and/or "p" constraints
1490 		 to see if (plus new offset) is a valid address before we put
1491 		 this through expand_simple_binop.  */
1492 	      x = expand_simple_binop (GET_MODE (x), PLUS, new,
1493 				       GEN_INT (offset), NULL_RTX,
1494 				       1, OPTAB_LIB_WIDEN);
1495 	      seq = get_insns ();
1496 	      end_sequence ();
1497 	      emit_insn_before (seq, insn);
1498 	    }
1499 	  break;
1500 
1501 	case SUBREG:
1502 	  new = instantiate_new_reg (SUBREG_REG (x), &offset);
1503 	  if (new == NULL)
1504 	    continue;
1505 	  if (offset != 0)
1506 	    {
1507 	      start_sequence ();
1508 	      new = expand_simple_binop (GET_MODE (new), PLUS, new,
1509 					 GEN_INT (offset), NULL_RTX,
1510 					 1, OPTAB_LIB_WIDEN);
1511 	      seq = get_insns ();
1512 	      end_sequence ();
1513 	      emit_insn_before (seq, insn);
1514 	    }
1515 	  x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1516 				   GET_MODE (new), SUBREG_BYTE (x));
1517 	  break;
1518 
1519 	default:
1520 	  continue;
1521 	}
1522 
1523       /* At this point, X contains the new value for the operand.
1524 	 Validate the new value vs the insn predicate.  Note that
1525 	 asm insns will have insn_code -1 here.  */
1526       if (!safe_insn_predicate (insn_code, i, x))
1527 	x = force_reg (insn_data[insn_code].operand[i].mode, x);
1528 
1529       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1530       any_change = true;
1531     }
1532 
1533   if (any_change)
1534     {
1535       /* Propagate operand changes into the duplicates.  */
1536       for (i = 0; i < recog_data.n_dups; ++i)
1537 	*recog_data.dup_loc[i]
1538 	  = recog_data.operand[(unsigned)recog_data.dup_num[i]];
1539 
1540       /* Force re-recognition of the instruction for validation.  */
1541       INSN_CODE (insn) = -1;
1542     }
1543 
1544   if (asm_noperands (PATTERN (insn)) >= 0)
1545     {
1546       if (!check_asm_operands (PATTERN (insn)))
1547 	{
1548 	  error_for_asm (insn, "impossible constraint in %<asm%>");
1549 	  delete_insn (insn);
1550 	}
1551     }
1552   else
1553     {
1554       if (recog_memoized (insn) < 0)
1555 	fatal_insn_not_found (insn);
1556     }
1557 }
1558 
1559 /* Subroutine of instantiate_decls.  Given RTL representing a decl,
1560    do any instantiation required.  */
1561 
1562 static void
1563 instantiate_decl (rtx x)
1564 {
1565   rtx addr;
1566 
1567   if (x == 0)
1568     return;
1569 
1570   /* If this is a CONCAT, recurse for the pieces.  */
1571   if (GET_CODE (x) == CONCAT)
1572     {
1573       instantiate_decl (XEXP (x, 0));
1574       instantiate_decl (XEXP (x, 1));
1575       return;
1576     }
1577 
1578   /* If this is not a MEM, no need to do anything.  Similarly if the
1579      address is a constant or a register that is not a virtual register.  */
1580   if (!MEM_P (x))
1581     return;
1582 
1583   addr = XEXP (x, 0);
1584   if (CONSTANT_P (addr)
1585       || (REG_P (addr)
1586 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1587 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1588     return;
1589 
1590   for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1591 }
1592 
1593 /* Helper for instantiate_decls called via walk_tree: Process all decls
1594    in the given DECL_VALUE_EXPR.  */
1595 
1596 static tree
1597 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1598 {
1599   tree t = *tp;
1600   if (! EXPR_P (t))
1601     {
1602       *walk_subtrees = 0;
1603       if (DECL_P (t) && DECL_RTL_SET_P (t))
1604 	instantiate_decl (DECL_RTL (t));
1605     }
1606   return NULL;
1607 }
1608 
1609 /* Subroutine of instantiate_decls: Process all decls in the given
1610    BLOCK node and all its subblocks.  */
1611 
1612 static void
1613 instantiate_decls_1 (tree let)
1614 {
1615   tree t;
1616 
1617   for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1618     {
1619       if (DECL_RTL_SET_P (t))
1620 	instantiate_decl (DECL_RTL (t));
1621       if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1622 	{
1623 	  tree v = DECL_VALUE_EXPR (t);
1624 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1625 	}
1626     }
1627 
1628   /* Process all subblocks.  */
1629   for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1630     instantiate_decls_1 (t);
1631 }
1632 
1633 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1634    all virtual registers in their DECL_RTL's.  */
1635 
1636 static void
1637 instantiate_decls (tree fndecl)
1638 {
1639   tree decl;
1640 
1641   /* Process all parameters of the function.  */
1642   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1643     {
1644       instantiate_decl (DECL_RTL (decl));
1645       instantiate_decl (DECL_INCOMING_RTL (decl));
1646       if (DECL_HAS_VALUE_EXPR_P (decl))
1647 	{
1648 	  tree v = DECL_VALUE_EXPR (decl);
1649 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1650 	}
1651     }
1652 
1653   /* Now process all variables defined in the function or its subblocks.  */
1654   instantiate_decls_1 (DECL_INITIAL (fndecl));
1655 }
1656 
1657 /* Pass through the INSNS of function FNDECL and convert virtual register
1658    references to hard register references.  */
1659 
1660 void
1661 instantiate_virtual_regs (void)
1662 {
1663   rtx insn;
1664 
1665   /* Compute the offsets to use for this function.  */
1666   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1667   var_offset = STARTING_FRAME_OFFSET;
1668   dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1669   out_arg_offset = STACK_POINTER_OFFSET;
1670 #ifdef FRAME_POINTER_CFA_OFFSET
1671   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1672 #else
1673   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1674 #endif
1675 
1676   /* Initialize recognition, indicating that volatile is OK.  */
1677   init_recog ();
1678 
1679   /* Scan through all the insns, instantiating every virtual register still
1680      present.  */
1681   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1682     if (INSN_P (insn))
1683       {
1684 	/* These patterns in the instruction stream can never be recognized.
1685 	   Fortunately, they shouldn't contain virtual registers either.  */
1686 	if (GET_CODE (PATTERN (insn)) == USE
1687 	    || GET_CODE (PATTERN (insn)) == CLOBBER
1688 	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
1689 	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1690 	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1691 	  continue;
1692 
1693 	instantiate_virtual_regs_in_insn (insn);
1694 
1695 	if (INSN_DELETED_P (insn))
1696 	  continue;
1697 
1698 	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1699 
1700 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
1701 	if (GET_CODE (insn) == CALL_INSN)
1702 	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1703 			instantiate_virtual_regs_in_rtx, NULL);
1704       }
1705 
1706   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
1707   instantiate_decls (current_function_decl);
1708 
1709   /* Indicate that, from now on, assign_stack_local should use
1710      frame_pointer_rtx.  */
1711   virtuals_instantiated = 1;
1712 }
1713 
1714 struct tree_opt_pass pass_instantiate_virtual_regs =
1715 {
1716   "vregs",                              /* name */
1717   NULL,                                 /* gate */
1718   instantiate_virtual_regs,             /* execute */
1719   NULL,                                 /* sub */
1720   NULL,                                 /* next */
1721   0,                                    /* static_pass_number */
1722   0,                                    /* tv_id */
1723   0,                                    /* properties_required */
1724   0,                                    /* properties_provided */
1725   0,                                    /* properties_destroyed */
1726   0,                                    /* todo_flags_start */
1727   TODO_dump_func,                       /* todo_flags_finish */
1728   0                                     /* letter */
1729 };
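
/* A sketch of how this descriptor is typically consumed (the exact call
   lives in passes.c and may differ between releases):

     NEXT_PASS (pass_instantiate_virtual_regs);

   i.e. the pass is simply chained into the RTL pass list; the pass manager
   then invokes instantiate_virtual_regs and honors TODO_dump_func.  */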
1730 
1731 
1732 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1733    This means a type for which function calls must pass an address to the
1734    function or get an address back from the function.
1735    EXP may be a type node or an expression (whose type is tested).  */
1736 
1737 int
1738 aggregate_value_p (tree exp, tree fntype)
1739 {
1740   int i, regno, nregs;
1741   rtx reg;
1742 
1743   tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1744 
1745   if (fntype)
1746     switch (TREE_CODE (fntype))
1747       {
1748       case CALL_EXPR:
1749 	fntype = get_callee_fndecl (fntype);
1750 	fntype = fntype ? TREE_TYPE (fntype) : 0;
1751 	break;
1752       case FUNCTION_DECL:
1753 	fntype = TREE_TYPE (fntype);
1754 	break;
1755       case FUNCTION_TYPE:
1756       case METHOD_TYPE:
1757         break;
1758       case IDENTIFIER_NODE:
1759 	fntype = 0;
1760 	break;
1761       default:
1762 	/* We don't expect other tree codes here.  */
1763 	gcc_unreachable ();
1764       }
1765 
1766   if (TREE_CODE (type) == VOID_TYPE)
1767     return 0;
1768   /* If the front end has decided that this needs to be passed by
1769      reference, do so.  */
1770   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1771       && DECL_BY_REFERENCE (exp))
1772     return 1;
1773   if (targetm.calls.return_in_memory (type, fntype))
1774     return 1;
1775   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1776      and thus can't be returned in registers.  */
1777   if (TREE_ADDRESSABLE (type))
1778     return 1;
1779   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1780     return 1;
1781   /* Make sure we have suitable call-clobbered regs to return
1782      the value in; if not, we must return it in memory.  */
1783   reg = hard_function_value (type, 0, fntype, 0);
1784 
1785   /* If we have something other than a REG (e.g. a PARALLEL), then assume
1786      it is OK.  */
1787   if (!REG_P (reg))
1788     return 0;
1789 
1790   regno = REGNO (reg);
1791   nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1792   for (i = 0; i < nregs; i++)
1793     if (! call_used_regs[regno + i])
1794       return 1;
1795   return 0;
1796 }
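
/* Illustrative results (target dependent, so treat as a sketch only):
   for an `int' value this usually returns 0, since the value fits in a
   call-clobbered return register; for a 64-byte struct it usually returns
   1, because targetm.calls.return_in_memory forces such a value into
   memory and the caller passes a hidden return-slot address instead.  */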
1797 
1798 /* Return true if we should assign DECL a pseudo register; false if it
1799    should live on the local stack.  */
1800 
1801 bool
1802 use_register_for_decl (tree decl)
1803 {
1804   /* Honor volatile.  */
1805   if (TREE_SIDE_EFFECTS (decl))
1806     return false;
1807 
1808   /* Honor addressability.  */
1809   if (TREE_ADDRESSABLE (decl))
1810     return false;
1811 
1812   /* Only register-like things go in registers.  */
1813   if (DECL_MODE (decl) == BLKmode)
1814     return false;
1815 
1816   /* If -ffloat-store specified, don't put explicit float variables
1817      into registers.  */
1818   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1819      propagates values across these stores, and it probably shouldn't.  */
1820   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1821     return false;
1822 
1823   /* If we're not interested in tracking debugging information for
1824      this decl, then we can certainly put it in a register.  */
1825   if (DECL_IGNORED_P (decl))
1826     return true;
1827 
1828   return (optimize || DECL_REGISTER (decl));
1829 }
1830 
1831 /* Return true if TYPE should be passed by invisible reference.  */
1832 
1833 bool
1834 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1835 		   tree type, bool named_arg)
1836 {
1837   if (type)
1838     {
1839       /* If this type contains non-trivial constructors, then it is
1840 	 forbidden for the middle-end to create any new copies.  */
1841       if (TREE_ADDRESSABLE (type))
1842 	return true;
1843 
1844       /* GCC post 3.4 passes *all* variable sized types by reference.  */
1845       if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1846 	return true;
1847     }
1848 
1849   return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1850 }
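
/* For example, a C99 variable length array parameter (whose TYPE_SIZE is
   not an INTEGER_CST) is always passed by reference here, as is a C++
   class with a non-trivial copy constructor (which the front end marks
   TREE_ADDRESSABLE); everything else is left to the target hook.  */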
1851 
1852 /* Return true if TYPE, which is passed by reference, should be callee
1853    copied instead of caller copied.  */
1854 
1855 bool
1856 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1857 			 tree type, bool named_arg)
1858 {
1859   if (type && TREE_ADDRESSABLE (type))
1860     return false;
1861   return targetm.calls.callee_copies (ca, mode, type, named_arg);
1862 }
1863 
1864 /* Structures to communicate between the subroutines of assign_parms.
1865    The first holds data persistent across all parameters, the second
1866    is cleared out for each parameter.  */
1867 
1868 struct assign_parm_data_all
1869 {
1870   CUMULATIVE_ARGS args_so_far;
1871   struct args_size stack_args_size;
1872   tree function_result_decl;
1873   tree orig_fnargs;
1874   rtx conversion_insns;
1875   HOST_WIDE_INT pretend_args_size;
1876   HOST_WIDE_INT extra_pretend_bytes;
1877   int reg_parm_stack_space;
1878 };
1879 
1880 struct assign_parm_data_one
1881 {
1882   tree nominal_type;
1883   tree passed_type;
1884   rtx entry_parm;
1885   rtx stack_parm;
1886   enum machine_mode nominal_mode;
1887   enum machine_mode passed_mode;
1888   enum machine_mode promoted_mode;
1889   struct locate_and_pad_arg_data locate;
1890   int partial;
1891   BOOL_BITFIELD named_arg : 1;
1892   BOOL_BITFIELD passed_pointer : 1;
1893   BOOL_BITFIELD on_stack : 1;
1894   BOOL_BITFIELD loaded_in_reg : 1;
1895 };
1896 
1897 /* A subroutine of assign_parms.  Initialize ALL.  */
1898 
1899 static void
1900 assign_parms_initialize_all (struct assign_parm_data_all *all)
1901 {
1902   tree fntype;
1903 
1904   memset (all, 0, sizeof (*all));
1905 
1906   fntype = TREE_TYPE (current_function_decl);
1907 
1908 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1909   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1910 #else
1911   INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1912 			current_function_decl, -1);
1913 #endif
1914 
1915 #ifdef REG_PARM_STACK_SPACE
1916   all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1917 #endif
1918 }
1919 
1920 /* If ARGS contains entries with complex types, split the entry into two
1921    entries of the component type.  Return a new list if substitutions are
1922    needed, else the old list.  */
1923 
1924 static tree
1925 split_complex_args (tree args)
1926 {
1927   tree p;
1928 
1929   /* Before allocating memory, check for the common case of no complex args.  */
1930   for (p = args; p; p = TREE_CHAIN (p))
1931     {
1932       tree type = TREE_TYPE (p);
1933       if (TREE_CODE (type) == COMPLEX_TYPE
1934 	  && targetm.calls.split_complex_arg (type))
1935         goto found;
1936     }
1937   return args;
1938 
1939  found:
1940   args = copy_list (args);
1941 
1942   for (p = args; p; p = TREE_CHAIN (p))
1943     {
1944       tree type = TREE_TYPE (p);
1945       if (TREE_CODE (type) == COMPLEX_TYPE
1946 	  && targetm.calls.split_complex_arg (type))
1947 	{
1948 	  tree decl;
1949 	  tree subtype = TREE_TYPE (type);
1950 	  bool addressable = TREE_ADDRESSABLE (p);
1951 
1952 	  /* Rewrite the PARM_DECL's type with its component.  */
1953 	  TREE_TYPE (p) = subtype;
1954 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1955 	  DECL_MODE (p) = VOIDmode;
1956 	  DECL_SIZE (p) = NULL;
1957 	  DECL_SIZE_UNIT (p) = NULL;
1958 	  /* If this arg must go in memory, put it in a pseudo here.
1959 	     We can't allow it to go in memory as per normal parms,
1960 	     because the usual place might not have the imag part
1961 	     adjacent to the real part.  */
1962 	  DECL_ARTIFICIAL (p) = addressable;
1963 	  DECL_IGNORED_P (p) = addressable;
1964 	  TREE_ADDRESSABLE (p) = 0;
1965 	  layout_decl (p, 0);
1966 
1967 	  /* Build a second synthetic decl.  */
1968 	  decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1969 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1970 	  DECL_ARTIFICIAL (decl) = addressable;
1971 	  DECL_IGNORED_P (decl) = addressable;
1972 	  layout_decl (decl, 0);
1973 
1974 	  /* Splice it in; skip the new decl.  */
1975 	  TREE_CHAIN (decl) = TREE_CHAIN (p);
1976 	  TREE_CHAIN (p) = decl;
1977 	  p = decl;
1978 	}
1979     }
1980 
1981   return args;
1982 }
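
/* A sketch of the transformation: with a target hook that splits DCmode,
   the parameter list

     double _Complex z; int n;

   becomes

     double z; double <unnamed>; int n;

   where `z' is rewritten in place to hold the real part and the unnamed
   synthetic PARM_DECL built above carries the imaginary part.  */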
1983 
1984 /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
1985    the hidden struct return argument, and (abi willing) complex args.
1986    Return the new parameter list.  */
1987 
1988 static tree
1989 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
1990 {
1991   tree fndecl = current_function_decl;
1992   tree fntype = TREE_TYPE (fndecl);
1993   tree fnargs = DECL_ARGUMENTS (fndecl);
1994 
1995   /* If struct value address is treated as the first argument, make it so.  */
1996   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
1997       && ! current_function_returns_pcc_struct
1998       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
1999     {
2000       tree type = build_pointer_type (TREE_TYPE (fntype));
2001       tree decl;
2002 
2003       decl = build_decl (PARM_DECL, NULL_TREE, type);
2004       DECL_ARG_TYPE (decl) = type;
2005       DECL_ARTIFICIAL (decl) = 1;
2006       DECL_IGNORED_P (decl) = 1;
2007 
2008       TREE_CHAIN (decl) = fnargs;
2009       fnargs = decl;
2010       all->function_result_decl = decl;
2011     }
2012 
2013   all->orig_fnargs = fnargs;
2014 
2015   /* If the target wants to split complex arguments into scalars, do so.  */
2016   if (targetm.calls.split_complex_arg)
2017     fnargs = split_complex_args (fnargs);
2018 
2019   return fnargs;
2020 }
2021 
2022 /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2023    data for the parameter.  Incorporate ABI specifics such as pass-by-
2024    reference and type promotion.  */
2025 
2026 static void
2027 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2028 			     struct assign_parm_data_one *data)
2029 {
2030   tree nominal_type, passed_type;
2031   enum machine_mode nominal_mode, passed_mode, promoted_mode;
2032 
2033   memset (data, 0, sizeof (*data));
2034 
2035   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
2036   if (!current_function_stdarg)
2037     data->named_arg = 1;  /* No variadic parms.  */
2038   else if (TREE_CHAIN (parm))
2039     data->named_arg = 1;  /* Not the last non-variadic parm.  */
2040   else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2041     data->named_arg = 1;  /* Only variadic ones are unnamed.  */
2042   else
2043     data->named_arg = 0;  /* Treat as variadic.  */
2044 
2045   nominal_type = TREE_TYPE (parm);
2046   passed_type = DECL_ARG_TYPE (parm);
2047 
2048   /* Look out for errors propagating this far.  Also, if the parameter's
2049      type is void then its value doesn't matter.  */
2050   if (TREE_TYPE (parm) == error_mark_node
2051       /* This can happen after weird syntax errors
2052 	 or if an enum type is defined among the parms.  */
2053       || TREE_CODE (parm) != PARM_DECL
2054       || passed_type == NULL
2055       || VOID_TYPE_P (nominal_type))
2056     {
2057       nominal_type = passed_type = void_type_node;
2058       nominal_mode = passed_mode = promoted_mode = VOIDmode;
2059       goto egress;
2060     }
2061 
2062   /* Find mode of arg as it is passed, and mode of arg as it should be
2063      during execution of this function.  */
2064   passed_mode = TYPE_MODE (passed_type);
2065   nominal_mode = TYPE_MODE (nominal_type);
2066 
2067   /* If the parm is to be passed as a transparent union, use the type of
2068      the first field for the tests below.  We have already verified that
2069      the modes are the same.  */
2070   if (TREE_CODE (passed_type) == UNION_TYPE
2071       && TYPE_TRANSPARENT_UNION (passed_type))
2072     passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2073 
2074   /* See if this arg was passed by invisible reference.  */
2075   if (pass_by_reference (&all->args_so_far, passed_mode,
2076 			 passed_type, data->named_arg))
2077     {
2078       passed_type = nominal_type = build_pointer_type (passed_type);
2079       data->passed_pointer = true;
2080       passed_mode = nominal_mode = Pmode;
2081     }
2082 
2083   /* Find mode as it is passed by the ABI.  */
2084   promoted_mode = passed_mode;
2085   if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2086     {
2087       int unsignedp = TYPE_UNSIGNED (passed_type);
2088       promoted_mode = promote_mode (passed_type, promoted_mode,
2089 				    &unsignedp, 1);
2090     }
2091 
2092  egress:
2093   data->nominal_type = nominal_type;
2094   data->passed_type = passed_type;
2095   data->nominal_mode = nominal_mode;
2096   data->passed_mode = passed_mode;
2097   data->promoted_mode = promoted_mode;
2098 }
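
/* A concrete sketch of the resulting mode fields (assuming a target whose
   promote_function_args hook returns true and whose word mode is SImode):
   for a parameter declared `short s', nominal_mode and passed_mode are
   HImode while promoted_mode becomes SImode, since the ABI widens the
   incoming value; for a large struct passed by invisible reference, all
   three modes collapse to Pmode and passed_pointer is set.  */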
2099 
2100 /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2101 
2102 static void
2103 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2104 			    struct assign_parm_data_one *data, bool no_rtl)
2105 {
2106   int varargs_pretend_bytes = 0;
2107 
2108   targetm.calls.setup_incoming_varargs (&all->args_so_far,
2109 					data->promoted_mode,
2110 					data->passed_type,
2111 					&varargs_pretend_bytes, no_rtl);
2112 
2113   /* If the back-end has requested extra stack space, record how much is
2114      needed.  Do not change pretend_args_size otherwise since it may be
2115      nonzero from an earlier partial argument.  */
2116   if (varargs_pretend_bytes > 0)
2117     all->pretend_args_size = varargs_pretend_bytes;
2118 }
2119 
2120 /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2121    the incoming location of the current parameter.  */
2122 
2123 static void
2124 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2125 			    struct assign_parm_data_one *data)
2126 {
2127   HOST_WIDE_INT pretend_bytes = 0;
2128   rtx entry_parm;
2129   bool in_regs;
2130 
2131   if (data->promoted_mode == VOIDmode)
2132     {
2133       data->entry_parm = data->stack_parm = const0_rtx;
2134       return;
2135     }
2136 
2137 #ifdef FUNCTION_INCOMING_ARG
2138   entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2139 				      data->passed_type, data->named_arg);
2140 #else
2141   entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2142 			     data->passed_type, data->named_arg);
2143 #endif
2144 
2145   if (entry_parm == 0)
2146     data->promoted_mode = data->passed_mode;
2147 
2148   /* Determine parm's home in the stack, in case it arrives in the stack
2149      or we should pretend it did.  Compute the stack position and rtx where
2150      the argument arrives and its size.
2151 
2152      There is one complexity here:  If this was a parameter that would
2153      have been passed in registers, but wasn't only because it is
2154      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2155      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2156      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2157      as it was the previous time.  */
2158   in_regs = entry_parm != 0;
2159 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2160   in_regs = true;
2161 #endif
2162   if (!in_regs && !data->named_arg)
2163     {
2164       if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2165 	{
2166 	  rtx tem;
2167 #ifdef FUNCTION_INCOMING_ARG
2168 	  tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2169 				       data->passed_type, true);
2170 #else
2171 	  tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2172 			      data->passed_type, true);
2173 #endif
2174 	  in_regs = tem != NULL;
2175 	}
2176     }
2177 
2178   /* If this parameter was passed both in registers and in the stack, use
2179      the copy on the stack.  */
2180   if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2181 					data->passed_type))
2182     entry_parm = 0;
2183 
2184   if (entry_parm)
2185     {
2186       int partial;
2187 
2188       partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2189 						 data->promoted_mode,
2190 						 data->passed_type,
2191 						 data->named_arg);
2192       data->partial = partial;
2193 
2194       /* The caller might already have allocated stack space for the
2195 	 register parameters.  */
2196       if (partial != 0 && all->reg_parm_stack_space == 0)
2197 	{
2198 	  /* Part of this argument is passed in registers and part
2199 	     is passed on the stack.  Ask the prologue code to extend
2200 	     the stack part so that we can recreate the full value.
2201 
2202 	     PRETEND_BYTES is the size of the registers we need to store.
2203 	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2204 	     stack space that the prologue should allocate.
2205 
2206 	     Internally, gcc assumes that the argument pointer is aligned
2207 	     to STACK_BOUNDARY bits.  This is used both for alignment
2208 	     optimizations (see init_emit) and to locate arguments that are
2209 	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
2210 	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2211 	     a stack boundary.  */
2212 
2213 	  /* We assume at most one partial arg, and it must be the first
2214 	     argument on the stack.  */
2215 	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2216 
2217 	  pretend_bytes = partial;
2218 	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2219 
2220 	  /* We want to align relative to the actual stack pointer, so
2221 	     don't include this in the stack size until later.  */
2222 	  all->extra_pretend_bytes = all->pretend_args_size;
2223 	}
2224     }
2225 
2226   locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2227 		       entry_parm ? data->partial : 0, current_function_decl,
2228 		       &all->stack_args_size, &data->locate);
2229 
2230   /* Adjust offsets to include the pretend args.  */
2231   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2232   data->locate.slot_offset.constant += pretend_bytes;
2233   data->locate.offset.constant += pretend_bytes;
2234 
2235   data->entry_parm = entry_parm;
2236 }
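
/* Worked example of the pretend-args computation above (a sketch assuming
   STACK_BYTES == 16): if arg_partial_bytes reports that 4 bytes of the
   argument arrive in registers, then pretend_bytes == 4 and
   all->pretend_args_size == CEIL_ROUND (4, 16) == 16, i.e. the prologue is
   asked to allocate one full stack-boundary slot in which the register
   part can be dumped next to the stack part.  */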
2237 
2238 /* A subroutine of assign_parms.  If there is actually space on the stack
2239    for this parm, count it in stack_args_size and return true.  */
2240 
2241 static bool
2242 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2243 			   struct assign_parm_data_one *data)
2244 {
2245   /* Trivially true if we've no incoming register.  */
2246   if (data->entry_parm == NULL)
2247     ;
2248   /* Also true if we're partially in registers and partially not,
2249      since we've arranged to drop the entire argument on the stack.  */
2250   else if (data->partial != 0)
2251     ;
2252   /* Also true if the target says that it's passed in both registers
2253      and on the stack.  */
2254   else if (GET_CODE (data->entry_parm) == PARALLEL
2255 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2256     ;
2257   /* Also true if the target says that there's stack allocated for
2258      all register parameters.  */
2259   else if (all->reg_parm_stack_space > 0)
2260     ;
2261   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2262   else
2263     return false;
2264 
2265   all->stack_args_size.constant += data->locate.size.constant;
2266   if (data->locate.size.var)
2267     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2268 
2269   return true;
2270 }
2271 
2272 /* A subroutine of assign_parms.  Given that this parameter is allocated
2273    stack space by the ABI, find it.  */
2274 
2275 static void
2276 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2277 {
2278   rtx offset_rtx, stack_parm;
2279   unsigned int align, boundary;
2280 
2281   /* If we're passing this arg using a reg, make its stack home the
2282      aligned stack slot.  */
2283   if (data->entry_parm)
2284     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2285   else
2286     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2287 
2288   stack_parm = current_function_internal_arg_pointer;
2289   if (offset_rtx != const0_rtx)
2290     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2291   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2292 
2293   set_mem_attributes (stack_parm, parm, 1);
2294 
2295   boundary = data->locate.boundary;
2296   align = BITS_PER_UNIT;
2297 
2298   /* If we're padding upward, we know that the alignment of the slot
2299      is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2300      intentionally forcing upward padding.  Otherwise we have to come
2301      up with a guess at the alignment based on OFFSET_RTX.  */
2302   if (data->locate.where_pad != downward || data->entry_parm)
2303     align = boundary;
2304   else if (GET_CODE (offset_rtx) == CONST_INT)
2305     {
2306       align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2307       align = align & -align;
2308     }
2309   set_mem_align (stack_parm, align);
2310 
2311   if (data->entry_parm)
2312     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2313 
2314   data->stack_parm = stack_parm;
2315 }
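
#if 0
/* Not compiled: a sketch of the alignment guess made above.  When the base
   pointer is aligned to BOUNDARY bits and the offset is known, the
   guaranteed alignment is the lowest set bit of (offset-in-bits | BOUNDARY).
   E.g. a 12-byte offset with a 32-bit boundary gives
   (96 | 32) & -(96 | 32) == 32 bits.  */
static unsigned int
guessed_parm_align (unsigned int offset_bits, unsigned int boundary)
{
  unsigned int a = offset_bits | boundary;
  return a & -a;		/* isolate the lowest set bit */
}
#endif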
2316 
2317 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_PARM such that it's
2318    always valid and contiguous.  */
2319 
2320 static void
2321 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2322 {
2323   rtx entry_parm = data->entry_parm;
2324   rtx stack_parm = data->stack_parm;
2325 
2326   /* If this parm was passed part in regs and part in memory, pretend it
2327      arrived entirely in memory by pushing the register-part onto the stack.
2328      In the special case of a DImode or DFmode that is split, we could put
2329      it together in a pseudoreg directly, but for now that's not worth
2330      bothering with.  */
2331   if (data->partial != 0)
2332     {
2333       /* Handle calls that pass values in multiple non-contiguous
2334 	 locations.  The Irix 6 ABI has examples of this.  */
2335       if (GET_CODE (entry_parm) == PARALLEL)
2336 	emit_group_store (validize_mem (stack_parm), entry_parm,
2337 			  data->passed_type,
2338 			  int_size_in_bytes (data->passed_type));
2339       else
2340 	{
2341 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
2342 	  move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2343 			       data->partial / UNITS_PER_WORD);
2344 	}
2345 
2346       entry_parm = stack_parm;
2347     }
2348 
2349   /* If we didn't decide this parm came in a register, by default it came
2350      on the stack.  */
2351   else if (entry_parm == NULL)
2352     entry_parm = stack_parm;
2353 
2354   /* When an argument is passed in multiple locations, we can't make use
2355      of this information, but we can save some copying if the whole argument
2356      is passed in a single register.  */
2357   else if (GET_CODE (entry_parm) == PARALLEL
2358 	   && data->nominal_mode != BLKmode
2359 	   && data->passed_mode != BLKmode)
2360     {
2361       size_t i, len = XVECLEN (entry_parm, 0);
2362 
2363       for (i = 0; i < len; i++)
2364 	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2365 	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2366 	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2367 		== data->passed_mode)
2368 	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2369 	  {
2370 	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2371 	    break;
2372 	  }
2373     }
2374 
2375   data->entry_parm = entry_parm;
2376 }
2377 
2378 /* A subroutine of assign_parms.  Adjust DATA->STACK_PARM such that it's
2379    always valid and properly aligned.  */
2380 
2381 static void
2382 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2383 {
2384   rtx stack_parm = data->stack_parm;
2385 
2386   /* If we can't trust the parm stack slot to be aligned enough for its
2387      ultimate type, don't use that slot after entry.  We'll make another
2388      stack slot, if we need one.  */
2389   if (stack_parm
2390       && ((STRICT_ALIGNMENT
2391 	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2392 	  || (data->nominal_type
2393 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2394 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2395     stack_parm = NULL;
2396 
2397   /* If parm was passed in memory, and we need to convert it on entry,
2398      don't store it back in that same slot.  */
2399   else if (data->entry_parm == stack_parm
2400 	   && data->nominal_mode != BLKmode
2401 	   && data->nominal_mode != data->passed_mode)
2402     stack_parm = NULL;
2403 
2404   /* If stack protection is in effect for this function, don't leave any
2405      pointers in their passed stack slots.  */
2406   else if (cfun->stack_protect_guard
2407 	   && (flag_stack_protect == 2
2408 	       || data->passed_pointer
2409 	       || POINTER_TYPE_P (data->nominal_type)))
2410     stack_parm = NULL;
2411 
2412   data->stack_parm = stack_parm;
2413 }
2414 
2415 /* A subroutine of assign_parms.  Return true if the current parameter
2416    should be stored as a BLKmode in the current frame.  */
2417 
2418 static bool
2419 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2420 {
2421   if (data->nominal_mode == BLKmode)
2422     return true;
2423   if (GET_CODE (data->entry_parm) == PARALLEL)
2424     return true;
2425 
2426 #ifdef BLOCK_REG_PADDING
2427   /* Only assign_parm_setup_block knows how to deal with register arguments
2428      that are padded at the least significant end.  */
2429   if (REG_P (data->entry_parm)
2430       && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2431       && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2432 	  == (BYTES_BIG_ENDIAN ? upward : downward)))
2433     return true;
2434 #endif
2435 
2436   return false;
2437 }
2438 
2439 /* A subroutine of assign_parms.  Arrange for the parameter to be
2440    present and valid in DATA->STACK_PARM.  */
2441 
2442 static void
2443 assign_parm_setup_block (struct assign_parm_data_all *all,
2444 			 tree parm, struct assign_parm_data_one *data)
2445 {
2446   rtx entry_parm = data->entry_parm;
2447   rtx stack_parm = data->stack_parm;
2448   HOST_WIDE_INT size;
2449   HOST_WIDE_INT size_stored;
2450   rtx orig_entry_parm = entry_parm;
2451 
2452   if (GET_CODE (entry_parm) == PARALLEL)
2453     entry_parm = emit_group_move_into_temps (entry_parm);
2454 
2455   /* If we've a non-block object that's nevertheless passed in parts,
2456      reconstitute it in register operations rather than on the stack.  */
2457   if (GET_CODE (entry_parm) == PARALLEL
2458       && data->nominal_mode != BLKmode)
2459     {
2460       rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2461 
2462       if ((XVECLEN (entry_parm, 0) > 1
2463 	   || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2464 	  && use_register_for_decl (parm))
2465 	{
2466 	  rtx parmreg = gen_reg_rtx (data->nominal_mode);
2467 
2468 	  push_to_sequence (all->conversion_insns);
2469 
2470 	  /* For values returned in multiple registers, handle possible
2471 	     incompatible calls to emit_group_store.
2472 
2473 	     For example, the following would be invalid, and would have to
2474 	     be fixed by the conditional below:
2475 
2476 	     emit_group_store ((reg:SF), (parallel:DF))
2477 	     emit_group_store ((reg:SI), (parallel:DI))
2478 
2479 	     An example of this are doubles in e500 v2:
2480 	     (parallel:DF (expr_list (reg:SI) (const_int 0))
2481 	     (expr_list (reg:SI) (const_int 4))).  */
2482 	  if (data->nominal_mode != data->passed_mode)
2483 	    {
2484 	      rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2485 	      emit_group_store (t, entry_parm, NULL_TREE,
2486 				GET_MODE_SIZE (GET_MODE (entry_parm)));
2487 	      convert_move (parmreg, t, 0);
2488 	    }
2489 	  else
2490 	    emit_group_store (parmreg, entry_parm, data->nominal_type,
2491 			      int_size_in_bytes (data->nominal_type));
2492 
2493 	  all->conversion_insns = get_insns ();
2494 	  end_sequence ();
2495 
2496 	  SET_DECL_RTL (parm, parmreg);
2497 	  return;
2498 	}
2499     }
2500 
2501   size = int_size_in_bytes (data->passed_type);
2502   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2503   if (stack_parm == 0)
2504     {
2505       DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2506       stack_parm = assign_stack_local (BLKmode, size_stored,
2507 				       DECL_ALIGN (parm));
2508       if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2509 	PUT_MODE (stack_parm, GET_MODE (entry_parm));
2510       set_mem_attributes (stack_parm, parm, 1);
2511     }
2512 
2513   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
2514      calls that pass values in multiple non-contiguous locations.  */
2515   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2516     {
2517       rtx mem;
2518 
2519       /* Note that we will be storing an integral number of words.
2520 	 So we have to be careful to ensure that we allocate an
2521 	 integral number of words.  We do this above when we call
2522 	 assign_stack_local if space was not allocated in the argument
2523 	 list.  If it was, this will not work if PARM_BOUNDARY is not
2524 	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
2525 	 if it becomes a problem.  Exception is when BLKmode arrives
2526 	 with arguments not conforming to word_mode.  */
2527 
2528       if (data->stack_parm == 0)
2529 	;
2530       else if (GET_CODE (entry_parm) == PARALLEL)
2531 	;
2532       else
2533 	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2534 
2535       mem = validize_mem (stack_parm);
2536 
2537       /* Handle values in multiple non-contiguous locations.  */
2538       if (GET_CODE (entry_parm) == PARALLEL)
2539 	{
2540 	  push_to_sequence (all->conversion_insns);
2541 	  emit_group_store (mem, entry_parm, data->passed_type, size);
2542 	  all->conversion_insns = get_insns ();
2543 	  end_sequence ();
2544 	}
2545 
2546       else if (size == 0)
2547 	;
2548 
2549       /* If SIZE is that of a mode no bigger than a word, just use
2550 	 that mode's store operation.  */
2551       else if (size <= UNITS_PER_WORD)
2552 	{
2553 	  enum machine_mode mode
2554 	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2555 
2556 	  if (mode != BLKmode
2557 #ifdef BLOCK_REG_PADDING
2558 	      && (size == UNITS_PER_WORD
2559 		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2560 		      != (BYTES_BIG_ENDIAN ? upward : downward)))
2561 #endif
2562 	      )
2563 	    {
2564 	      rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2565 	      emit_move_insn (change_address (mem, mode, 0), reg);
2566 	    }
2567 
2568 	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2569 	     machine must be aligned to the left before storing
2570 	     to memory.  Note that the previous test doesn't
2571 	     handle all cases (e.g. SIZE == 3).  */
2572 	  else if (size != UNITS_PER_WORD
2573 #ifdef BLOCK_REG_PADDING
2574 		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2575 		       == downward)
2576 #else
2577 		   && BYTES_BIG_ENDIAN
2578 #endif
2579 		   )
2580 	    {
2581 	      rtx tem, x;
2582 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2583 	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2584 
2585 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2586 				build_int_cst (NULL_TREE, by),
2587 				NULL_RTX, 1);
2588 	      tem = change_address (mem, word_mode, 0);
2589 	      emit_move_insn (tem, x);
2590 	    }
2591 	  else
2592 	    move_block_from_reg (REGNO (entry_parm), mem,
2593 				 size_stored / UNITS_PER_WORD);
2594 	}
2595       else
2596 	move_block_from_reg (REGNO (entry_parm), mem,
2597 			     size_stored / UNITS_PER_WORD);
2598     }
2599   else if (data->stack_parm == 0)
2600     {
2601       push_to_sequence (all->conversion_insns);
2602       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2603 		       BLOCK_OP_NORMAL);
2604       all->conversion_insns = get_insns ();
2605       end_sequence ();
2606     }
2607 
2608   data->stack_parm = stack_parm;
2609   SET_DECL_RTL (parm, stack_parm);
2610 }
2611 
2612 /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
2613    parameter.  Get it there.  Perform all ABI specified conversions.  */
2614 
2615 static void
2616 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2617 		       struct assign_parm_data_one *data)
2618 {
2619   rtx parmreg;
2620   enum machine_mode promoted_nominal_mode;
2621   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2622   bool did_conversion = false;
2623 
2624   /* Store the parm in a pseudoregister during the function, but we may
2625      need to do it in a wider mode.  */
2626 
2627   promoted_nominal_mode
2628     = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
2629 
2630   parmreg = gen_reg_rtx (promoted_nominal_mode);
2631 
2632   if (!DECL_ARTIFICIAL (parm))
2633     mark_user_reg (parmreg);
2634 
2635   /* If this was an item that we received a pointer to,
2636      set DECL_RTL appropriately.  */
2637   if (data->passed_pointer)
2638     {
2639       rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2640       set_mem_attributes (x, parm, 1);
2641       SET_DECL_RTL (parm, x);
2642     }
2643   else
2644     SET_DECL_RTL (parm, parmreg);
2645 
2646   /* Copy the value into the register.  */
2647   if (data->nominal_mode != data->passed_mode
2648       || promoted_nominal_mode != data->promoted_mode)
2649     {
2650       int save_tree_used;
2651 
2652       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2653 	 mode, by the caller.  We now have to convert it to
2654 	 NOMINAL_MODE, if different.  However, PARMREG may be in
2655 	 a different mode than NOMINAL_MODE if it is being stored
2656 	 promoted.
2657 
2658 	 If ENTRY_PARM is a hard register, it might be in a register
2659 	 not valid for operating in its mode (e.g., an odd-numbered
2660 	 register for a DFmode).  In that case, moves are the only
2661 	 thing valid, so we can't do a convert from there.  This
2662 	 occurs when the calling sequence allows such misaligned
2663 	 usages.
2664 
2665 	 In addition, the conversion may involve a call, which could
2666 	 clobber parameters which haven't been copied to pseudo
2667 	 registers yet.  Therefore, we must first copy the parm to
2668 	 a pseudo reg here, and save the conversion until after all
2669 	 parameters have been moved.  */
2670 
2671       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2672 
2673       emit_move_insn (tempreg, validize_mem (data->entry_parm));
2674 
2675       push_to_sequence (all->conversion_insns);
2676       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2677 
2678       if (GET_CODE (tempreg) == SUBREG
2679 	  && GET_MODE (tempreg) == data->nominal_mode
2680 	  && REG_P (SUBREG_REG (tempreg))
2681 	  && data->nominal_mode == data->passed_mode
2682 	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2683 	  && GET_MODE_SIZE (GET_MODE (tempreg))
2684 	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2685 	{
2686 	  /* The argument is already sign/zero extended, so note it
2687 	     into the subreg.  */
2688 	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2689 	  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2690 	}
2691 
2692       /* TREE_USED gets set erroneously during expand_assignment.  */
2693       save_tree_used = TREE_USED (parm);
2694       expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2695       TREE_USED (parm) = save_tree_used;
2696       all->conversion_insns = get_insns ();
2697       end_sequence ();
2698 
2699       did_conversion = true;
2700     }
2701   else
2702     emit_move_insn (parmreg, validize_mem (data->entry_parm));
2703 
2704   /* If we were passed a pointer but the actual value can safely live
2705      in a register, put it in one.  */
2706   if (data->passed_pointer
2707       && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2708       /* If by-reference argument was promoted, demote it.  */
2709       && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2710 	  || use_register_for_decl (parm)))
2711     {
2712       /* We can't use nominal_mode, because it will have been set to
2713 	 Pmode above.  We must use the actual mode of the parm.  */
2714       parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2715       mark_user_reg (parmreg);
2716 
2717       if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2718 	{
2719 	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2720 	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2721 
2722 	  push_to_sequence (all->conversion_insns);
2723 	  emit_move_insn (tempreg, DECL_RTL (parm));
2724 	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2725 	  emit_move_insn (parmreg, tempreg);
2726 	  all->conversion_insns = get_insns ();
2727 	  end_sequence ();
2728 
2729 	  did_conversion = true;
2730 	}
2731       else
2732 	emit_move_insn (parmreg, DECL_RTL (parm));
2733 
2734       SET_DECL_RTL (parm, parmreg);
2735 
2736       /* STACK_PARM is the pointer, not the parm, and PARMREG is
2737 	 now the parm.  */
2738       data->stack_parm = NULL;
2739     }
2740 
2741   /* Mark the register as eliminable if we did no conversion and it was
2742      copied from memory at a fixed offset, and the arg pointer was not
2743      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
2744      offset formed an invalid address, such memory-equivalences as we
2745      make here would screw up life analysis for it.  */
2746   if (data->nominal_mode == data->passed_mode
2747       && !did_conversion
2748       && data->stack_parm != 0
2749       && MEM_P (data->stack_parm)
2750       && data->locate.offset.var == 0
2751       && reg_mentioned_p (virtual_incoming_args_rtx,
2752 			  XEXP (data->stack_parm, 0)))
2753     {
2754       rtx linsn = get_last_insn ();
2755       rtx sinsn, set;
2756 
2757       /* Mark complex types separately.  */
2758       if (GET_CODE (parmreg) == CONCAT)
2759 	{
2760 	  enum machine_mode submode
2761 	    = GET_MODE_INNER (GET_MODE (parmreg));
2762 	  int regnor = REGNO (XEXP (parmreg, 0));
2763 	  int regnoi = REGNO (XEXP (parmreg, 1));
2764 	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2765 	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
2766 					  GET_MODE_SIZE (submode));
2767 
2768 	  /* Scan backwards for the set of the real and
2769 	     imaginary parts.  */
2770 	  for (sinsn = linsn; sinsn != 0;
2771 	       sinsn = prev_nonnote_insn (sinsn))
2772 	    {
2773 	      set = single_set (sinsn);
2774 	      if (set == 0)
2775 		continue;
2776 
2777 	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
2778 		REG_NOTES (sinsn)
2779 		  = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2780 				       REG_NOTES (sinsn));
2781 	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
2782 		REG_NOTES (sinsn)
2783 		  = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2784 				       REG_NOTES (sinsn));
2785 	    }
2786 	}
2787       else if ((set = single_set (linsn)) != 0
2788 	       && SET_DEST (set) == parmreg)
2789 	REG_NOTES (linsn)
2790 	  = gen_rtx_EXPR_LIST (REG_EQUIV,
2791 			       data->stack_parm, REG_NOTES (linsn));
2792     }
2793 
2794   /* For pointer data type, suggest pointer register.  */
2795   if (POINTER_TYPE_P (TREE_TYPE (parm)))
2796     mark_reg_pointer (parmreg,
2797 		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2798 }
2799 
2800 /* A subroutine of assign_parms.  Allocate stack space to hold the current
2801    parameter.  Get it there.  Perform all ABI specified conversions.  */
2802 
2803 static void
2804 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2805 		         struct assign_parm_data_one *data)
2806 {
2807   /* Value must be stored in the stack slot STACK_PARM during function
2808      execution.  */
2809   bool to_conversion = false;
2810 
2811   if (data->promoted_mode != data->nominal_mode)
2812     {
2813       /* Conversion is required.  */
2814       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2815 
2816       emit_move_insn (tempreg, validize_mem (data->entry_parm));
2817 
2818       push_to_sequence (all->conversion_insns);
2819       to_conversion = true;
2820 
2821       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2822 					  TYPE_UNSIGNED (TREE_TYPE (parm)));
2823 
2824       if (data->stack_parm)
2825 	/* ??? This may need a big-endian conversion on sparc64.  */
2826 	data->stack_parm
2827 	  = adjust_address (data->stack_parm, data->nominal_mode, 0);
2828     }
2829 
2830   if (data->entry_parm != data->stack_parm)
2831     {
2832       rtx src, dest;
2833 
2834       if (data->stack_parm == 0)
2835 	{
2836 	  data->stack_parm
2837 	    = assign_stack_local (GET_MODE (data->entry_parm),
2838 				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2839 				  TYPE_ALIGN (data->passed_type));
2840 	  set_mem_attributes (data->stack_parm, parm, 1);
2841 	}
2842 
2843       dest = validize_mem (data->stack_parm);
2844       src = validize_mem (data->entry_parm);
2845 
2846       if (MEM_P (src))
2847 	{
2848 	  /* Use a block move to handle potentially misaligned entry_parm.  */
2849 	  if (!to_conversion)
2850 	    push_to_sequence (all->conversion_insns);
2851 	  to_conversion = true;
2852 
2853 	  emit_block_move (dest, src,
2854 			   GEN_INT (int_size_in_bytes (data->passed_type)),
2855 			   BLOCK_OP_NORMAL);
2856 	}
2857       else
2858 	emit_move_insn (dest, src);
2859     }
2860 
2861   if (to_conversion)
2862     {
2863       all->conversion_insns = get_insns ();
2864       end_sequence ();
2865     }
2866 
2867   SET_DECL_RTL (parm, data->stack_parm);
2868 }
2869 
2870 /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
2871    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
2872 
2873 static void
2874 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2875 {
2876   tree parm;
2877   tree orig_fnargs = all->orig_fnargs;
2878 
2879   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2880     {
2881       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2882 	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2883 	{
2884 	  rtx tmp, real, imag;
2885 	  enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2886 
2887 	  real = DECL_RTL (fnargs);
2888 	  imag = DECL_RTL (TREE_CHAIN (fnargs));
2889 	  if (inner != GET_MODE (real))
2890 	    {
2891 	      real = gen_lowpart_SUBREG (inner, real);
2892 	      imag = gen_lowpart_SUBREG (inner, imag);
2893 	    }
2894 
2895 	  if (TREE_ADDRESSABLE (parm))
2896 	    {
2897 	      rtx rmem, imem;
2898 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2899 
2900 	      /* split_complex_arg put the real and imag parts in
2901 		 pseudos.  Move them to memory.  */
2902 	      tmp = assign_stack_local (DECL_MODE (parm), size,
2903 					TYPE_ALIGN (TREE_TYPE (parm)));
2904 	      set_mem_attributes (tmp, parm, 1);
2905 	      rmem = adjust_address_nv (tmp, inner, 0);
2906 	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2907 	      push_to_sequence (all->conversion_insns);
2908 	      emit_move_insn (rmem, real);
2909 	      emit_move_insn (imem, imag);
2910 	      all->conversion_insns = get_insns ();
2911 	      end_sequence ();
2912 	    }
2913 	  else
2914 	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2915 	  SET_DECL_RTL (parm, tmp);
2916 
2917 	  real = DECL_INCOMING_RTL (fnargs);
2918 	  imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2919 	  if (inner != GET_MODE (real))
2920 	    {
2921 	      real = gen_lowpart_SUBREG (inner, real);
2922 	      imag = gen_lowpart_SUBREG (inner, imag);
2923 	    }
2924 	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2925 	  set_decl_incoming_rtl (parm, tmp);
2926 	  fnargs = TREE_CHAIN (fnargs);
2927 	}
2928       else
2929 	{
2930 	  SET_DECL_RTL (parm, DECL_RTL (fnargs));
2931 	  set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2932 
2933 	  /* Set MEM_EXPR to the original decl, i.e. to PARM,
2934 	     instead of the copy of decl, i.e. FNARGS.  */
2935 	  if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2936 	    set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2937 	}
2938 
2939       fnargs = TREE_CHAIN (fnargs);
2940     }
2941 }
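
/* A sketch of the recombination for a non-addressable `double _Complex'
   parameter whose halves landed in two DFmode pseudos: DECL_RTL becomes

     (concat:DC (reg:DF <real>) (reg:DF <imag>))

   whereas an addressable one is instead spilled into a DCmode stack slot
   above so that its address can be taken.  */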
2942 
2943 /* Assign RTL expressions to the function's parameters.  This may involve
2944    copying them into registers and using those registers as the DECL_RTL.  */
2945 
2946 static void
2947 assign_parms (tree fndecl)
2948 {
2949   struct assign_parm_data_all all;
2950   tree fnargs, parm;
2951 
2952   current_function_internal_arg_pointer
2953     = targetm.calls.internal_arg_pointer ();
2954 
2955   assign_parms_initialize_all (&all);
2956   fnargs = assign_parms_augmented_arg_list (&all);
2957 
2958   for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2959     {
2960       struct assign_parm_data_one data;
2961 
2962       /* Extract the type of PARM; adjust it according to ABI.  */
2963       assign_parm_find_data_types (&all, parm, &data);
2964 
2965       /* Early out for errors and void parameters.  */
2966       if (data.passed_mode == VOIDmode)
2967 	{
2968 	  SET_DECL_RTL (parm, const0_rtx);
2969 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2970 	  continue;
2971 	}
2972 
2973       if (current_function_stdarg && !TREE_CHAIN (parm))
2974 	assign_parms_setup_varargs (&all, &data, false);
2975 
2976       /* Find out where the parameter arrives in this function.  */
2977       assign_parm_find_entry_rtl (&all, &data);
2978 
2979       /* Find out where stack space for this parameter might be.  */
2980       if (assign_parm_is_stack_parm (&all, &data))
2981 	{
2982 	  assign_parm_find_stack_rtl (parm, &data);
2983 	  assign_parm_adjust_entry_rtl (&data);
2984 	}
2985 
2986       /* Record permanently how this parm was passed.  */
2987       set_decl_incoming_rtl (parm, data.entry_parm);
2988 
2989       /* Update info on where next arg arrives in registers.  */
2990       FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
2991 			    data.passed_type, data.named_arg);
2992 
2993       assign_parm_adjust_stack_rtl (&data);
2994 
2995       if (assign_parm_setup_block_p (&data))
2996 	assign_parm_setup_block (&all, parm, &data);
2997       else if (data.passed_pointer || use_register_for_decl (parm))
2998 	assign_parm_setup_reg (&all, parm, &data);
2999       else
3000 	assign_parm_setup_stack (&all, parm, &data);
3001     }
3002 
3003   if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3004     assign_parms_unsplit_complex (&all, fnargs);
3005 
3006   /* Output all parameter conversion instructions (possibly including calls)
3007      now that all parameters have been copied out of hard registers.  */
3008   emit_insn (all.conversion_insns);
3009 
3010   /* If we are receiving a struct value address as the first argument, set up
3011      the RTL for the function result. As this might require code to convert
3012      the transmitted address to Pmode, we do this here to ensure that possible
3013      preliminary conversions of the address have been emitted already.  */
3014   if (all.function_result_decl)
3015     {
3016       tree result = DECL_RESULT (current_function_decl);
3017       rtx addr = DECL_RTL (all.function_result_decl);
3018       rtx x;
3019 
3020       if (DECL_BY_REFERENCE (result))
3021 	x = addr;
3022       else
3023 	{
3024 	  addr = convert_memory_address (Pmode, addr);
3025 	  x = gen_rtx_MEM (DECL_MODE (result), addr);
3026 	  set_mem_attributes (x, result, 1);
3027 	}
3028       SET_DECL_RTL (result, x);
3029     }
3030 
3031   /* We have aligned all the args, so add space for the pretend args.  */
3032   current_function_pretend_args_size = all.pretend_args_size;
3033   all.stack_args_size.constant += all.extra_pretend_bytes;
3034   current_function_args_size = all.stack_args_size.constant;
3035 
3036   /* Adjust function incoming argument size for alignment and
3037      minimum length.  */
3038 
3039 #ifdef REG_PARM_STACK_SPACE
3040   current_function_args_size = MAX (current_function_args_size,
3041 				    REG_PARM_STACK_SPACE (fndecl));
3042 #endif
3043 
3044   current_function_args_size = CEIL_ROUND (current_function_args_size,
3045 					   PARM_BOUNDARY / BITS_PER_UNIT);
3046 
3047 #ifdef ARGS_GROW_DOWNWARD
3048   current_function_arg_offset_rtx
3049     = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3050        : expand_expr (size_diffop (all.stack_args_size.var,
3051 				   size_int (-all.stack_args_size.constant)),
3052 		      NULL_RTX, VOIDmode, 0));
3053 #else
3054   current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3055 #endif
3056 
3057   /* See how many bytes, if any, of its args a function should try to pop
3058      on return.  */
3059 
3060   current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3061 						 current_function_args_size);
3062 
3063   /* For a stdarg.h function, save info about
3064      regs and stack space used by the named args.  */
3065 
3066   current_function_args_info = all.args_so_far;
3067 
3068   /* Set the rtx used for the function return value.  Put this in its
3069      own variable so any optimizers that need this information don't have
3070      to include tree.h.  Do this here so it gets done when an inlined
3071      function gets output.  */
3072 
3073   current_function_return_rtx
3074     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3075        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3076 
3077   /* If scalar return value was computed in a pseudo-reg, or was a named
3078      return value that got dumped to the stack, copy that to the hard
3079      return register.  */
3080   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3081     {
3082       tree decl_result = DECL_RESULT (fndecl);
3083       rtx decl_rtl = DECL_RTL (decl_result);
3084 
3085       if (REG_P (decl_rtl)
3086 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3087 	  : DECL_REGISTER (decl_result))
3088 	{
3089 	  rtx real_decl_rtl;
3090 
3091 	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3092 							fndecl, true);
3093 	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3094 	  /* The delay slot scheduler assumes that current_function_return_rtx
3095 	     holds the hard register containing the return value, not a
3096 	     temporary pseudo.  */
3097 	  current_function_return_rtx = real_decl_rtl;
3098 	}
3099     }
3100 }
3101 
3102 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3103    For all seen types, gimplify their sizes.  */
3104 
3105 static tree
3106 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3107 {
3108   tree t = *tp;
3109 
3110   *walk_subtrees = 0;
3111   if (TYPE_P (t))
3112     {
3113       if (POINTER_TYPE_P (t))
3114 	*walk_subtrees = 1;
3115       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3116 	       && !TYPE_SIZES_GIMPLIFIED (t))
3117 	{
3118 	  gimplify_type_sizes (t, (tree *) data);
3119 	  *walk_subtrees = 1;
3120 	}
3121     }
3122 
3123   return NULL;
3124 }
3125 
3126 /* Gimplify the parameter list for current_function_decl.  This involves
3127    evaluating SAVE_EXPRs of variable sized parameters and generating code
3128    to implement callee-copied reference parameters.  Returns a list of
3129    statements to add to the beginning of the function, or NULL if nothing
3130    to do.  */
3131 
3132 tree
3133 gimplify_parameters (void)
3134 {
3135   struct assign_parm_data_all all;
3136   tree fnargs, parm, stmts = NULL;
3137 
3138   assign_parms_initialize_all (&all);
3139   fnargs = assign_parms_augmented_arg_list (&all);
3140 
3141   for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3142     {
3143       struct assign_parm_data_one data;
3144 
3145       /* Extract the type of PARM; adjust it according to ABI.  */
3146       assign_parm_find_data_types (&all, parm, &data);
3147 
3148       /* Early out for errors and void parameters.  */
3149       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3150 	continue;
3151 
3152       /* Update info on where next arg arrives in registers.  */
3153       FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3154 			    data.passed_type, data.named_arg);
3155 
3156       /* ??? Once upon a time variable_size stuffed parameter list
3157 	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
3158 	 turned out to be less than manageable in the gimple world.
3159 	 Now we have to hunt them down ourselves.  */
3160       walk_tree_without_duplicates (&data.passed_type,
3161 				    gimplify_parm_type, &stmts);
3162 
3163       if (!TREE_CONSTANT (DECL_SIZE (parm)))
3164 	{
3165 	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3166 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3167 	}
3168 
3169       if (data.passed_pointer)
3170 	{
3171           tree type = TREE_TYPE (data.passed_type);
3172 	  if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3173 				       type, data.named_arg))
3174 	    {
3175 	      tree local, t;
3176 
3177 	      /* For constant sized objects, this is trivial; for
3178 		 variable-sized objects, we have to play games.  */
3179 	      if (TREE_CONSTANT (DECL_SIZE (parm)))
3180 		{
3181 		  local = create_tmp_var (type, get_name (parm));
3182 		  DECL_IGNORED_P (local) = 0;
3183 		}
3184 	      else
3185 		{
3186 		  tree ptr_type, addr, args;
3187 
3188 		  ptr_type = build_pointer_type (type);
3189 		  addr = create_tmp_var (ptr_type, get_name (parm));
3190 		  DECL_IGNORED_P (addr) = 0;
3191 		  local = build_fold_indirect_ref (addr);
3192 
3193 		  args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3194 		  t = built_in_decls[BUILT_IN_ALLOCA];
3195 		  t = build_function_call_expr (t, args);
3196 		  t = fold_convert (ptr_type, t);
3197 		  t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3198 		  gimplify_and_add (t, &stmts);
3199 		}
3200 
3201 	      t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3202 	      gimplify_and_add (t, &stmts);
3203 
3204 	      SET_DECL_VALUE_EXPR (parm, local);
3205 	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
3206 	    }
3207 	}
3208     }
3209 
3210   return stmts;
3211 }
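
/* Illustrative sketch (not from the original sources): for a parameter of
   variable-sized type T that reference_callee_copied says the callee must
   copy, the loop above conceptually emits, at the start of the body,

       T *tmp_addr = __builtin_alloca (DECL_SIZE_UNIT (parm));
       *tmp_addr = parm;

   and then records *tmp_addr as DECL_VALUE_EXPR of the parm so the rest of
   the function operates on the local copy.  "tmp_addr" here is only a
   stand-in for the temporary created by create_tmp_var above; constant-sized
   parameters get a plain local temporary instead of the alloca.  */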
3212 
3213 /* Indicate whether REGNO is an incoming argument to the current function
3214    that was promoted to a wider mode.  If so, return the RTX for the
3215    register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
3216    that REGNO is promoted from and whether the promotion was signed or
3217    unsigned.  */
3218 
3219 rtx
3220 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3221 {
3222   tree arg;
3223 
3224   for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3225        arg = TREE_CHAIN (arg))
3226     if (REG_P (DECL_INCOMING_RTL (arg))
3227 	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
3228 	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3229       {
3230 	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3231 	int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3232 
3233 	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3234 	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3235 	    && mode != DECL_MODE (arg))
3236 	  {
3237 	    *pmode = DECL_MODE (arg);
3238 	    *punsignedp = unsignedp;
3239 	    return DECL_INCOMING_RTL (arg);
3240 	  }
3241       }
3242 
3243   return 0;
3244 }
3245 
3246 
3247 /* Compute the size and offset from the start of the stacked arguments for a
3248    parm passed in mode PASSED_MODE and with type TYPE.
3249 
3250    INITIAL_OFFSET_PTR points to the current offset into the stacked
3251    arguments.
3252 
3253    The starting offset and size for this parm are returned in
3254    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
3255    nonzero, the offset is that of the stack slot, which is returned in
3256    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
3257    padding required from the initial offset ptr to the stack slot.
3258 
3259    IN_REGS is nonzero if the argument will be passed in registers.  It will
3260    never be set if REG_PARM_STACK_SPACE is not defined.
3261 
3262    FNDECL is the function in which the argument was defined.
3263 
3264    There are two types of rounding that are done.  The first, controlled by
3265    FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3266    list to be aligned to the specified boundary (in bits).  This rounding
3267    affects the initial and starting offsets, but not the argument size.
3268 
3269    The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3270    optionally rounds the size of the parm to PARM_BOUNDARY.  The
3271    initial offset is not affected by this rounding, while the size always
3272    is and the starting offset may be.  */
3273 
3274 /*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3275     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3276     callers pass in the total size of args so far as
3277     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
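
/* Worked example with hypothetical values: assume PARM_BOUNDARY is 32 bits,
   FUNCTION_ARG_BOUNDARY returns 64 bits for this parm, STACK_POINTER_OFFSET
   is zero, the arguments so far occupy 6 bytes, the parm is 5 bytes long,
   padding is upward, and args grow upward.  The first rounding aligns the
   6-byte offset up to 8 (the 64-bit boundary), so LOCATE->SLOT_OFFSET and
   LOCATE->OFFSET are 8; the second rounding pads the 5-byte size up to 8
   bytes, a multiple of PARM_BOUNDARY, so LOCATE->SIZE is 8.  */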
3278 
3279 void
3280 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3281 		     int partial, tree fndecl ATTRIBUTE_UNUSED,
3282 		     struct args_size *initial_offset_ptr,
3283 		     struct locate_and_pad_arg_data *locate)
3284 {
3285   tree sizetree;
3286   enum direction where_pad;
3287   unsigned int boundary;
3288   int reg_parm_stack_space = 0;
3289   int part_size_in_regs;
3290 
3291 #ifdef REG_PARM_STACK_SPACE
3292   reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3293 
3294   /* If we have found a stack parm before we reach the end of the
3295      area reserved for registers, skip that area.  */
3296   if (! in_regs)
3297     {
3298       if (reg_parm_stack_space > 0)
3299 	{
3300 	  if (initial_offset_ptr->var)
3301 	    {
3302 	      initial_offset_ptr->var
3303 		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3304 			      ssize_int (reg_parm_stack_space));
3305 	      initial_offset_ptr->constant = 0;
3306 	    }
3307 	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
3308 	    initial_offset_ptr->constant = reg_parm_stack_space;
3309 	}
3310     }
3311 #endif /* REG_PARM_STACK_SPACE */
3312 
3313   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3314 
3315   sizetree
3316     = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3317   where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3318   boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3319   locate->where_pad = where_pad;
3320   locate->boundary = boundary;
3321 
3322   /* Remember if the outgoing parameter requires extra alignment on the
3323      calling function side.  */
3324   if (boundary > PREFERRED_STACK_BOUNDARY)
3325     boundary = PREFERRED_STACK_BOUNDARY;
3326   if (cfun->stack_alignment_needed < boundary)
3327     cfun->stack_alignment_needed = boundary;
3328 
3329 #ifdef ARGS_GROW_DOWNWARD
3330   locate->slot_offset.constant = -initial_offset_ptr->constant;
3331   if (initial_offset_ptr->var)
3332     locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3333 					  initial_offset_ptr->var);
3334 
3335   {
3336     tree s2 = sizetree;
3337     if (where_pad != none
3338 	&& (!host_integerp (sizetree, 1)
3339 	    || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3340       s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3341     SUB_PARM_SIZE (locate->slot_offset, s2);
3342   }
3343 
3344   locate->slot_offset.constant += part_size_in_regs;
3345 
3346   if (!in_regs
3347 #ifdef REG_PARM_STACK_SPACE
3348       || REG_PARM_STACK_SPACE (fndecl) > 0
3349 #endif
3350      )
3351     pad_to_arg_alignment (&locate->slot_offset, boundary,
3352 			  &locate->alignment_pad);
3353 
3354   locate->size.constant = (-initial_offset_ptr->constant
3355 			   - locate->slot_offset.constant);
3356   if (initial_offset_ptr->var)
3357     locate->size.var = size_binop (MINUS_EXPR,
3358 				   size_binop (MINUS_EXPR,
3359 					       ssize_int (0),
3360 					       initial_offset_ptr->var),
3361 				   locate->slot_offset.var);
3362 
3363   /* Pad_below needs the pre-rounded size to know how much to pad
3364      below.  */
3365   locate->offset = locate->slot_offset;
3366   if (where_pad == downward)
3367     pad_below (&locate->offset, passed_mode, sizetree);
3368 
3369 #else /* !ARGS_GROW_DOWNWARD */
3370   if (!in_regs
3371 #ifdef REG_PARM_STACK_SPACE
3372       || REG_PARM_STACK_SPACE (fndecl) > 0
3373 #endif
3374       )
3375     pad_to_arg_alignment (initial_offset_ptr, boundary,
3376 			  &locate->alignment_pad);
3377   locate->slot_offset = *initial_offset_ptr;
3378 
3379 #ifdef PUSH_ROUNDING
3380   if (passed_mode != BLKmode)
3381     sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3382 #endif
3383 
3384   /* Pad_below needs the pre-rounded size to know how much to pad below
3385      so this must be done before rounding up.  */
3386   locate->offset = locate->slot_offset;
3387   if (where_pad == downward)
3388     pad_below (&locate->offset, passed_mode, sizetree);
3389 
3390   if (where_pad != none
3391       && (!host_integerp (sizetree, 1)
3392 	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3393     sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3394 
3395   ADD_PARM_SIZE (locate->size, sizetree);
3396 
3397   locate->size.constant -= part_size_in_regs;
3398 #endif /* ARGS_GROW_DOWNWARD */
3399 }
3400 
3401 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3402    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
3403 
3404 static void
3405 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3406 		      struct args_size *alignment_pad)
3407 {
3408   tree save_var = NULL_TREE;
3409   HOST_WIDE_INT save_constant = 0;
3410   int boundary_in_bytes = boundary / BITS_PER_UNIT;
3411   HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3412 
3413 #ifdef SPARC_STACK_BOUNDARY_HACK
3414   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3415      the real alignment of %sp.  However, when it does this, the
3416      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
3417   if (SPARC_STACK_BOUNDARY_HACK)
3418     sp_offset = 0;
3419 #endif
3420 
3421   if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3422     {
3423       save_var = offset_ptr->var;
3424       save_constant = offset_ptr->constant;
3425     }
3426 
3427   alignment_pad->var = NULL_TREE;
3428   alignment_pad->constant = 0;
3429 
3430   if (boundary > BITS_PER_UNIT)
3431     {
3432       if (offset_ptr->var)
3433 	{
3434 	  tree sp_offset_tree = ssize_int (sp_offset);
3435 	  tree offset = size_binop (PLUS_EXPR,
3436 				    ARGS_SIZE_TREE (*offset_ptr),
3437 				    sp_offset_tree);
3438 #ifdef ARGS_GROW_DOWNWARD
3439 	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3440 #else
3441 	  tree rounded = round_up   (offset, boundary / BITS_PER_UNIT);
3442 #endif
3443 
3444 	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3445 	  /* ARGS_SIZE_TREE includes constant term.  */
3446 	  offset_ptr->constant = 0;
3447 	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3448 	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3449 					     save_var);
3450 	}
3451       else
3452 	{
3453 	  offset_ptr->constant = -sp_offset +
3454 #ifdef ARGS_GROW_DOWNWARD
3455 	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3456 #else
3457 	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3458 #endif
3459 	    if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3460 	      alignment_pad->constant = offset_ptr->constant - save_constant;
3461 	}
3462     }
3463 }
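
/* Example: with STACK_POINTER_OFFSET of 0 and a 128-bit (16-byte) BOUNDARY,
   a constant offset of 20 becomes CEIL_ROUND (20, 16) = 32 when arguments
   grow upward, or FLOOR_ROUND (20, 16) = 16 with ARGS_GROW_DOWNWARD.  When
   BOUNDARY exceeds both PARM_BOUNDARY and STACK_BOUNDARY, the difference
   from the saved offset (12 or -4 here) is recorded in ALIGNMENT_PAD.  */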
3464 
3465 static void
3466 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3467 {
3468   if (passed_mode != BLKmode)
3469     {
3470       if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3471 	offset_ptr->constant
3472 	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3473 	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3474 	      - GET_MODE_SIZE (passed_mode));
3475     }
3476   else
3477     {
3478       if (TREE_CODE (sizetree) != INTEGER_CST
3479 	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3480 	{
3481 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
3482 	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3483 	  /* Add it in.  */
3484 	  ADD_PARM_SIZE (*offset_ptr, s2);
3485 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
3486 	}
3487     }
3488 }
3489 
3490 /* Walk the tree of blocks describing the binding levels within a function
3491    and warn about variables that might be killed by setjmp or vfork.
3492    This is done after calling flow_analysis and before global_alloc
3493    clobbers the pseudo-regs to hard regs.  */
3494 
3495 void
3496 setjmp_vars_warning (tree block)
3497 {
3498   tree decl, sub;
3499 
3500   for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3501     {
3502       if (TREE_CODE (decl) == VAR_DECL
3503 	  && DECL_RTL_SET_P (decl)
3504 	  && REG_P (DECL_RTL (decl))
3505 	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3506 	warning (0, "variable %q+D might be clobbered by %<longjmp%>"
3507 		 " or %<vfork%>",
3508 		 decl);
3509     }
3510 
3511   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3512     setjmp_vars_warning (sub);
3513 }
3514 
3515 /* Do the appropriate part of setjmp_vars_warning
3516    but for arguments instead of local variables.  */
3517 
3518 void
3519 setjmp_args_warning (void)
3520 {
3521   tree decl;
3522   for (decl = DECL_ARGUMENTS (current_function_decl);
3523        decl; decl = TREE_CHAIN (decl))
3524     if (DECL_RTL (decl) != 0
3525 	&& REG_P (DECL_RTL (decl))
3526 	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3527       warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3528 	       decl);
3529 }
3530 
3531 
3532 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3533    and create duplicate blocks.  */
3534 /* ??? Need an option to either create block fragments or to create
3535    abstract origin duplicates of a source block.  It really depends
3536    on what optimization has been performed.  */
3537 
3538 void
3539 reorder_blocks (void)
3540 {
3541   tree block = DECL_INITIAL (current_function_decl);
3542   VEC(tree,heap) *block_stack;
3543 
3544   if (block == NULL_TREE)
3545     return;
3546 
3547   block_stack = VEC_alloc (tree, heap, 10);
3548 
3549   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
3550   clear_block_marks (block);
3551 
3552   /* Prune the old trees away, so that they don't get in the way.  */
3553   BLOCK_SUBBLOCKS (block) = NULL_TREE;
3554   BLOCK_CHAIN (block) = NULL_TREE;
3555 
3556   /* Recreate the block tree from the note nesting.  */
3557   reorder_blocks_1 (get_insns (), block, &block_stack);
3558   BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3559 
3560   /* Remove deleted blocks from the block fragment chains.  */
3561   reorder_fix_fragments (block);
3562 
3563   VEC_free (tree, heap, block_stack);
3564 }
3565 
3566 /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
3567 
3568 void
3569 clear_block_marks (tree block)
3570 {
3571   while (block)
3572     {
3573       TREE_ASM_WRITTEN (block) = 0;
3574       clear_block_marks (BLOCK_SUBBLOCKS (block));
3575       block = BLOCK_CHAIN (block);
3576     }
3577 }
3578 
3579 static void
3580 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3581 {
3582   rtx insn;
3583 
3584   for (insn = insns; insn; insn = NEXT_INSN (insn))
3585     {
3586       if (NOTE_P (insn))
3587 	{
3588 	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3589 	    {
3590 	      tree block = NOTE_BLOCK (insn);
3591 
3592 	      /* If we have seen this block before, that means it now
3593 		 spans multiple address regions.  Create a new fragment.  */
3594 	      if (TREE_ASM_WRITTEN (block))
3595 		{
3596 		  tree new_block = copy_node (block);
3597 		  tree origin;
3598 
3599 		  origin = (BLOCK_FRAGMENT_ORIGIN (block)
3600 			    ? BLOCK_FRAGMENT_ORIGIN (block)
3601 			    : block);
3602 		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3603 		  BLOCK_FRAGMENT_CHAIN (new_block)
3604 		    = BLOCK_FRAGMENT_CHAIN (origin);
3605 		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3606 
3607 		  NOTE_BLOCK (insn) = new_block;
3608 		  block = new_block;
3609 		}
3610 
3611 	      BLOCK_SUBBLOCKS (block) = 0;
3612 	      TREE_ASM_WRITTEN (block) = 1;
3613 	      /* When there's only one block for the entire function,
3614 		 current_block == block and we mustn't do this, it
3615 		 will cause infinite recursion.  */
3616 	      if (block != current_block)
3617 		{
3618 		  BLOCK_SUPERCONTEXT (block) = current_block;
3619 		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3620 		  BLOCK_SUBBLOCKS (current_block) = block;
3621 		  current_block = block;
3622 		}
3623 	      VEC_safe_push (tree, heap, *p_block_stack, block);
3624 	    }
3625 	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3626 	    {
3627 	      NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3628 	      BLOCK_SUBBLOCKS (current_block)
3629 		= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3630 	      current_block = BLOCK_SUPERCONTEXT (current_block);
3631 	    }
3632 	}
3633     }
3634 }
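
/* Illustration: if an optimization (basic-block reordering, for instance)
   has left the insn stream with two disjoint regions covered by the same
   lexical block B,

       BLOCK_BEG (B) ... BLOCK_END (B) ... BLOCK_BEG (B) ... BLOCK_END (B)

   the second BLOCK_BEG note is rewritten above to point at a fresh copy of
   B whose BLOCK_FRAGMENT_ORIGIN is B, and the copy is threaded onto B's
   BLOCK_FRAGMENT_CHAIN so the debug back ends can describe one logical
   block that spans both address ranges.  */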
3635 
3636 /* Rationalize BLOCK_FRAGMENT_ORIGIN.  If an origin block no longer
3637    appears in the block tree, select one of the fragments to become
3638    the new origin block.  */
3639 
3640 static void
3641 reorder_fix_fragments (tree block)
3642 {
3643   while (block)
3644     {
3645       tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3646       tree new_origin = NULL_TREE;
3647 
3648       if (dup_origin)
3649 	{
3650 	  if (! TREE_ASM_WRITTEN (dup_origin))
3651 	    {
3652 	      new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3653 
3654 	      /* Find the first of the remaining fragments.  There must
3655 		 be at least one -- the current block.  */
3656 	      while (! TREE_ASM_WRITTEN (new_origin))
3657 		new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3658 	      BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3659 	    }
3660 	}
3661       else if (! dup_origin)
3662 	new_origin = block;
3663 
3664       /* Re-root the rest of the fragments to the new origin.  In the
3665 	 case that DUP_ORIGIN was null, that means BLOCK was the origin
3666 	 of a chain of fragments and we want to remove those fragments
3667 	 that didn't make it to the output.  */
3668       if (new_origin)
3669 	{
3670 	  tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3671 	  tree chain = *pp;
3672 
3673 	  while (chain)
3674 	    {
3675 	      if (TREE_ASM_WRITTEN (chain))
3676 		{
3677 		  BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3678 		  *pp = chain;
3679 		  pp = &BLOCK_FRAGMENT_CHAIN (chain);
3680 		}
3681 	      chain = BLOCK_FRAGMENT_CHAIN (chain);
3682 	    }
3683 	  *pp = NULL_TREE;
3684 	}
3685 
3686       reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3687       block = BLOCK_CHAIN (block);
3688     }
3689 }
3690 
3691 /* Reverse the order of elements in the chain T of blocks,
3692    and return the new head of the chain (old last element).  */
3693 
3694 tree
3695 blocks_nreverse (tree t)
3696 {
3697   tree prev = 0, decl, next;
3698   for (decl = t; decl; decl = next)
3699     {
3700       next = BLOCK_CHAIN (decl);
3701       BLOCK_CHAIN (decl) = prev;
3702       prev = decl;
3703     }
3704   return prev;
3705 }
3706 
3707 /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
3708    non-NULL, list them all into VECTOR, in a depth-first preorder
3709    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
3710    blocks.  */
3711 
3712 static int
3713 all_blocks (tree block, tree *vector)
3714 {
3715   int n_blocks = 0;
3716 
3717   while (block)
3718     {
3719       TREE_ASM_WRITTEN (block) = 0;
3720 
3721       /* Record this block.  */
3722       if (vector)
3723 	vector[n_blocks] = block;
3724 
3725       ++n_blocks;
3726 
3727       /* Record the subblocks, and their subblocks...  */
3728       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3729 			      vector ? vector + n_blocks : 0);
3730       block = BLOCK_CHAIN (block);
3731     }
3732 
3733   return n_blocks;
3734 }
3735 
3736 /* Return a vector containing all the blocks rooted at BLOCK.  The
3737    number of elements in the vector is stored in N_BLOCKS_P.  The
3738    vector is dynamically allocated; it is the caller's responsibility
3739    to call `free' on the pointer returned.  */
3740 
3741 static tree *
3742 get_block_vector (tree block, int *n_blocks_p)
3743 {
3744   tree *block_vector;
3745 
3746   *n_blocks_p = all_blocks (block, NULL);
3747   block_vector = xmalloc (*n_blocks_p * sizeof (tree));
3748   all_blocks (block, block_vector);
3749 
3750   return block_vector;
3751 }
3752 
3753 static GTY(()) int next_block_index = 2;
3754 
3755 /* Set BLOCK_NUMBER for all the blocks in FN.  */
3756 
3757 void
3758 number_blocks (tree fn)
3759 {
3760   int i;
3761   int n_blocks;
3762   tree *block_vector;
3763 
3764   /* For SDB and XCOFF debugging output, we start numbering the blocks
3765      from 1 within each function, rather than keeping a running
3766      count.  */
3767 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3768   if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3769     next_block_index = 1;
3770 #endif
3771 
3772   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3773 
3774   /* The top-level BLOCK isn't numbered at all.  */
3775   for (i = 1; i < n_blocks; ++i)
3776     /* We number the blocks from two.  */
3777     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3778 
3779   free (block_vector);
3780 
3781   return;
3782 }
3783 
3784 /* If VAR is present in a subblock of BLOCK, return the subblock.  */
3785 
3786 tree
3787 debug_find_var_in_block_tree (tree var, tree block)
3788 {
3789   tree t;
3790 
3791   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3792     if (t == var)
3793       return block;
3794 
3795   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3796     {
3797       tree ret = debug_find_var_in_block_tree (var, t);
3798       if (ret)
3799 	return ret;
3800     }
3801 
3802   return NULL_TREE;
3803 }
3804 
3805 /* Allocate a function structure for FNDECL and set its contents
3806    to the defaults.  */
3807 
3808 void
3809 allocate_struct_function (tree fndecl)
3810 {
3811   tree result;
3812   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3813 
3814   cfun = ggc_alloc_cleared (sizeof (struct function));
3815 
3816   cfun->stack_alignment_needed = STACK_BOUNDARY;
3817   cfun->preferred_stack_boundary = STACK_BOUNDARY;
3818 
3819   current_function_funcdef_no = funcdef_no++;
3820 
3821   cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3822 
3823   init_eh_for_function ();
3824 
3825   lang_hooks.function.init (cfun);
3826   if (init_machine_status)
3827     cfun->machine = (*init_machine_status) ();
3828 
3829   if (fndecl == NULL)
3830     return;
3831 
3832   DECL_STRUCT_FUNCTION (fndecl) = cfun;
3833   cfun->decl = fndecl;
3834 
3835   result = DECL_RESULT (fndecl);
3836   if (aggregate_value_p (result, fndecl))
3837     {
3838 #ifdef PCC_STATIC_STRUCT_RETURN
3839       current_function_returns_pcc_struct = 1;
3840 #endif
3841       current_function_returns_struct = 1;
3842     }
3843 
3844   current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3845 
3846   current_function_stdarg
3847     = (fntype
3848        && TYPE_ARG_TYPES (fntype) != 0
3849        && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3850 	   != void_type_node));
3851 
3852   /* Assume all registers in stdarg functions need to be saved.  */
3853   cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3854   cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3855 }
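
/* Example of the stdarg test above: for "int f (const char *fmt, ...)" the
   TYPE_ARG_TYPES list ends with the type of FMT rather than void_type_node,
   so current_function_stdarg is set.  For "int g (void)" or "int h (int)"
   the list is terminated by void_type_node, and for an unprototyped
   "int k ()" TYPE_ARG_TYPES is null, so the flag stays clear in both
   cases.  */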
3856 
3857 /* Reset cfun and other non-struct-function variables to defaults as
3858    appropriate for emitting rtl at the start of a function.  */
3859 
3860 static void
3861 prepare_function_start (tree fndecl)
3862 {
3863   if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3864     cfun = DECL_STRUCT_FUNCTION (fndecl);
3865   else
3866     allocate_struct_function (fndecl);
3867   init_emit ();
3868   init_varasm_status (cfun);
3869   init_expr ();
3870 
3871   cse_not_expected = ! optimize;
3872 
3873   /* Caller save not needed yet.  */
3874   caller_save_needed = 0;
3875 
3876   /* We haven't done register allocation yet.  */
3877   reg_renumber = 0;
3878 
3879   /* Indicate that we have not instantiated virtual registers yet.  */
3880   virtuals_instantiated = 0;
3881 
3882   /* Indicate that we want CONCATs now.  */
3883   generating_concat_p = 1;
3884 
3885   /* Indicate we have no need of a frame pointer yet.  */
3886   frame_pointer_needed = 0;
3887 }
3888 
3889 /* Initialize the rtl expansion mechanism so that we can do simple things
3890    like generate sequences.  This is used to provide a context during global
3891    initialization of some passes.  */
3892 void
3893 init_dummy_function_start (void)
3894 {
3895   prepare_function_start (NULL);
3896 }
3897 
3898 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3899    and initialize static variables for generating RTL for the statements
3900    of the function.  */
3901 
3902 void
3903 init_function_start (tree subr)
3904 {
3905   prepare_function_start (subr);
3906 
3907   /* Prevent ever trying to delete the first instruction of a
3908      function.  Also tell final how to output a linenum before the
3909      function prologue.  Note linenums could be missing, e.g. when
3910      compiling a Java .class file.  */
3911   if (! DECL_IS_BUILTIN (subr))
3912     emit_line_note (DECL_SOURCE_LOCATION (subr));
3913 
3914   /* Make sure first insn is a note even if we don't want linenums.
3915      This makes sure the first insn will never be deleted.
3916      Also, final expects a note to appear there.  */
3917   emit_note (NOTE_INSN_DELETED);
3918 
3919   /* Warn if this value is an aggregate type,
3920      regardless of which calling convention we are using for it.  */
3921   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3922     warning (OPT_Waggregate_return, "function returns an aggregate");
3923 }
3924 
3925 /* Make sure all values used by the optimization passes have sane
3926    defaults.  */
3927 void
3928 init_function_for_compilation (void)
3929 {
3930   reg_renumber = 0;
3931 
3932   /* No prologue/epilogue insns yet.  Make sure that these vectors are
3933      empty.  */
3934   gcc_assert (VEC_length (int, prologue) == 0);
3935   gcc_assert (VEC_length (int, epilogue) == 0);
3936   gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3937 }
3938 
3939 struct tree_opt_pass pass_init_function =
3940 {
3941   NULL,                                 /* name */
3942   NULL,                                 /* gate */
3943   init_function_for_compilation,        /* execute */
3944   NULL,                                 /* sub */
3945   NULL,                                 /* next */
3946   0,                                    /* static_pass_number */
3947   0,                                    /* tv_id */
3948   0,                                    /* properties_required */
3949   0,                                    /* properties_provided */
3950   0,                                    /* properties_destroyed */
3951   0,                                    /* todo_flags_start */
3952   0,                                    /* todo_flags_finish */
3953   0                                     /* letter */
3954 };
3955 
3956 
3957 void
3958 expand_main_function (void)
3959 {
3960 #if (defined(INVOKE__main)				\
3961      || (!defined(HAS_INIT_SECTION)			\
3962 	 && !defined(INIT_SECTION_ASM_OP)		\
3963 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3964   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3965 #endif
3966 }
3967 
3968 /* Expand code to initialize the stack_protect_guard.  This is invoked at
3969    the beginning of a function to be protected.  */
3970 
3971 #ifndef HAVE_stack_protect_set
3972 # define HAVE_stack_protect_set		0
3973 # define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
3974 #endif
3975 
3976 void
3977 stack_protect_prologue (void)
3978 {
3979   tree guard_decl = targetm.stack_protect_guard ();
3980   rtx x, y;
3981 
3982   /* Avoid expand_expr here, because we don't want guard_decl pulled
3983      into registers unless absolutely necessary.  And we know that
3984      cfun->stack_protect_guard is a local stack slot, so this skips
3985      all the fluff.  */
3986   x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3987   y = validize_mem (DECL_RTL (guard_decl));
3988 
3989   /* Allow the target to copy from Y to X without leaking Y into a
3990      register.  */
3991   if (HAVE_stack_protect_set)
3992     {
3993       rtx insn = gen_stack_protect_set (x, y);
3994       if (insn)
3995 	{
3996 	  emit_insn (insn);
3997 	  return;
3998 	}
3999     }
4000 
4001   /* Otherwise do a straight move.  */
4002   emit_move_insn (x, y);
4003 }
4004 
4005 /* Expand code to verify the stack_protect_guard.  This is invoked at
4006    the end of a function to be protected.  */
4007 
4008 #ifndef HAVE_stack_protect_test
4009 # define HAVE_stack_protect_test		0
4010 # define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
4011 #endif
4012 
4013 void
4014 stack_protect_epilogue (void)
4015 {
4016   tree guard_decl = targetm.stack_protect_guard ();
4017   rtx label = gen_label_rtx ();
4018   rtx x, y, tmp;
4019 
4020   /* Avoid expand_expr here, because we don't want guard_decl pulled
4021      into registers unless absolutely necessary.  And we know that
4022      cfun->stack_protect_guard is a local stack slot, so this skips
4023      all the fluff.  */
4024   x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4025   y = validize_mem (DECL_RTL (guard_decl));
4026 
4027   /* Allow the target to compare Y with X without leaking either into
4028      a register.  */
4029   switch (HAVE_stack_protect_test != 0)
4030     {
4031     case 1:
4032       tmp = gen_stack_protect_test (x, y, label);
4033       if (tmp)
4034 	{
4035 	  emit_insn (tmp);
4036 	  break;
4037 	}
4038       /* FALLTHRU */
4039 
4040     default:
4041       emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4042       break;
4043     }
4044 
4045   /* The noreturn predictor has been moved to the tree level.  The rtl-level
4046      predictors estimate this branch about 20%, which isn't enough to get
4047      things moved out of line.  Since this is the only extant case of adding
4048      a noreturn function at the rtl level, it doesn't seem worth doing anything
4049      except adding the prediction by hand.  */
4050   tmp = get_last_insn ();
4051   if (JUMP_P (tmp))
4052     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4053 
4054   expand_expr_stmt (targetm.stack_protect_fail ());
4055   emit_label (label);
4056 }
4057 
4058 /* Start the RTL for a new function, and set variables used for
4059    emitting RTL.
4060    SUBR is the FUNCTION_DECL node.  */
4063 
4064 void
4065 expand_function_start (tree subr)
4066 {
4067   /* Make sure volatile mem refs aren't considered
4068      valid operands of arithmetic insns.  */
4069   init_recog_no_volatile ();
4070 
4071   current_function_profile
4072     = (profile_flag
4073        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4074 
4075   current_function_limit_stack
4076     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4077 
4078   /* Make the label for return statements to jump to.  Do not special
4079      case machines with special return instructions -- they will be
4080      handled later during jump, ifcvt, or epilogue creation.  */
4081   return_label = gen_label_rtx ();
4082 
4083   /* Initialize rtx used to return the value.  */
4084   /* Do this before assign_parms so that we copy the struct value address
4085      before any library calls that assign parms might generate.  */
4086 
4087   /* Decide whether to return the value in memory or in a register.  */
4088   if (aggregate_value_p (DECL_RESULT (subr), subr))
4089     {
4090       /* Returning something that won't go in a register.  */
4091       rtx value_address = 0;
4092 
4093 #ifdef PCC_STATIC_STRUCT_RETURN
4094       if (current_function_returns_pcc_struct)
4095 	{
4096 	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4097 	  value_address = assemble_static_space (size);
4098 	}
4099       else
4100 #endif
4101 	{
4102 	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
4103 	  /* Expect to be passed the address of a place to store the value.
4104 	     If it is passed as an argument, assign_parms will take care of
4105 	     it.  */
4106 	  if (sv)
4107 	    {
4108 	      value_address = gen_reg_rtx (Pmode);
4109 	      emit_move_insn (value_address, sv);
4110 	    }
4111 	}
4112       if (value_address)
4113 	{
4114 	  rtx x = value_address;
4115 	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4116 	    {
4117 	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4118 	      set_mem_attributes (x, DECL_RESULT (subr), 1);
4119 	    }
4120 	  SET_DECL_RTL (DECL_RESULT (subr), x);
4121 	}
4122     }
4123   else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4124     /* If return mode is void, this decl rtl should not be used.  */
4125     SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4126   else
4127     {
4128       /* Compute the return values into a pseudo reg, which we will copy
4129 	 into the true return register after the cleanups are done.  */
4130       tree return_type = TREE_TYPE (DECL_RESULT (subr));
4131       if (TYPE_MODE (return_type) != BLKmode
4132 	  && targetm.calls.return_in_msb (return_type))
4133 	/* expand_function_end will insert the appropriate padding in
4134 	   this case.  Use the return value's natural (unpadded) mode
4135 	   within the function proper.  */
4136 	SET_DECL_RTL (DECL_RESULT (subr),
4137 		      gen_reg_rtx (TYPE_MODE (return_type)));
4138       else
4139 	{
4140 	  /* In order to figure out what mode to use for the pseudo, we
4141 	     figure out what the mode of the eventual return register will
4142 	     actually be, and use that.  */
4143 	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4144 
4145 	  /* Structures that are returned in registers are not
4146 	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
4147 	  if (REG_P (hard_reg))
4148 	    SET_DECL_RTL (DECL_RESULT (subr),
4149 			  gen_reg_rtx (GET_MODE (hard_reg)));
4150 	  else
4151 	    {
4152 	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4153 	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4154 	    }
4155 	}
4156 
4157       /* Set DECL_REGISTER flag so that expand_function_end will copy the
4158 	 result to the real return register(s).  */
4159       DECL_REGISTER (DECL_RESULT (subr)) = 1;
4160     }
4161 
4162   /* Initialize rtx for parameters and local variables.
4163      In some cases this requires emitting insns.  */
4164   assign_parms (subr);
4165 
4166   /* If function gets a static chain arg, store it.  */
4167   if (cfun->static_chain_decl)
4168     {
4169       tree parm = cfun->static_chain_decl;
4170       rtx local = gen_reg_rtx (Pmode);
4171 
4172       set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4173       SET_DECL_RTL (parm, local);
4174       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4175 
4176       emit_move_insn (local, static_chain_incoming_rtx);
4177     }
4178 
4179   /* If the function receives a non-local goto, then store the
4180      bits we need to restore the frame pointer.  */
4181   if (cfun->nonlocal_goto_save_area)
4182     {
4183       tree t_save;
4184       rtx r_save;
4185 
4186       /* ??? We need to do this save early.  Unfortunately, this is
4187 	 before the frame variable gets declared.  Help out...  */
4188       expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4189 
4190       t_save = build4 (ARRAY_REF, ptr_type_node,
4191 		       cfun->nonlocal_goto_save_area,
4192 		       integer_zero_node, NULL_TREE, NULL_TREE);
4193       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4194       r_save = convert_memory_address (Pmode, r_save);
4195 
4196       emit_move_insn (r_save, virtual_stack_vars_rtx);
4197       update_nonlocal_goto_save_area ();
4198     }
4199 
4200   /* The following was moved from init_function_start.
4201      The move is supposed to make sdb output more accurate.  */
4202   /* Indicate the beginning of the function body,
4203      as opposed to parm setup.  */
4204   emit_note (NOTE_INSN_FUNCTION_BEG);
4205 
4206   if (!NOTE_P (get_last_insn ()))
4207     emit_note (NOTE_INSN_DELETED);
4208   parm_birth_insn = get_last_insn ();
4209 
4210   if (current_function_profile)
4211     {
4212 #ifdef PROFILE_HOOK
4213       PROFILE_HOOK (current_function_funcdef_no);
4214 #endif
4215     }
4216 
4217   /* After the display initializations is where the tail-recursion label
4218      should go, if we end up needing one.   Ensure we have a NOTE here
4219      since some things (like trampolines) get placed before this.  */
4220   tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
4221 
4222   /* Make sure there is a line number after the function entry setup code.  */
4223   force_next_line_note ();
4224 }
4225 
4226 /* Undo the effects of init_dummy_function_start.  */
4227 void
4228 expand_dummy_function_end (void)
4229 {
4230   /* End any sequences that failed to be closed due to syntax errors.  */
4231   while (in_sequence_p ())
4232     end_sequence ();
4233 
4234   /* Outside function body, can't compute type's actual size
4235      until next function's body starts.  */
4236 
4237   free_after_parsing (cfun);
4238   free_after_compilation (cfun);
4239   cfun = 0;
4240 }
4241 
4242 /* Call DOIT for each hard register used as a return value from
4243    the current function.  */
4244 
4245 void
4246 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4247 {
4248   rtx outgoing = current_function_return_rtx;
4249 
4250   if (! outgoing)
4251     return;
4252 
4253   if (REG_P (outgoing))
4254     (*doit) (outgoing, arg);
4255   else if (GET_CODE (outgoing) == PARALLEL)
4256     {
4257       int i;
4258 
4259       for (i = 0; i < XVECLEN (outgoing, 0); i++)
4260 	{
4261 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4262 
4263 	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4264 	    (*doit) (x, arg);
4265 	}
4266     }
4267 }
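
/* Example: if the return value lives in a PARALLEL -- say a small structure
   split across two hard registers -- diddle_return_value invokes DOIT once
   for each constituent hard register.  clobber_return_register and
   use_return_register below rely on this to emit one CLOBBER or USE per
   return register.  */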
4268 
4269 static void
4270 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4271 {
4272   emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4273 }
4274 
4275 void
4276 clobber_return_register (void)
4277 {
4278   diddle_return_value (do_clobber_return_reg, NULL);
4279 
4280   /* In case we do use pseudo to return value, clobber it too.  */
4281   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4282     {
4283       tree decl_result = DECL_RESULT (current_function_decl);
4284       rtx decl_rtl = DECL_RTL (decl_result);
4285       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4286 	{
4287 	  do_clobber_return_reg (decl_rtl, NULL);
4288 	}
4289     }
4290 }
4291 
4292 static void
4293 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4294 {
4295   emit_insn (gen_rtx_USE (VOIDmode, reg));
4296 }
4297 
4298 void
4299 use_return_register (void)
4300 {
4301   diddle_return_value (do_use_return_reg, NULL);
4302 }
4303 
4304 /* Possibly warn about unused parameters.  */
4305 void
4306 do_warn_unused_parameter (tree fn)
4307 {
4308   tree decl;
4309 
4310   for (decl = DECL_ARGUMENTS (fn);
4311        decl; decl = TREE_CHAIN (decl))
4312     if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4313 	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4314       warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4315 }
4316 
4317 static GTY(()) rtx initial_trampoline;
4318 
4319 /* Generate RTL for the end of the current function.  */
4320 
4321 void
4322 expand_function_end (void)
4323 {
4324   rtx clobber_after;
4325 
4326   /* If arg_pointer_save_area was referenced only from a nested
4327      function, we will not have initialized it yet.  Do that now.  */
4328   if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4329     get_arg_pointer_save_area (cfun);
4330 
4331   /* If we are doing stack checking and this function makes calls,
4332      do a stack probe at the start of the function to ensure we have enough
4333      space for another stack frame.  */
4334   if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4335     {
4336       rtx insn, seq;
4337 
4338       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4339 	if (CALL_P (insn))
4340 	  {
4341 	    start_sequence ();
4342 	    probe_stack_range (STACK_CHECK_PROTECT,
4343 			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4344 	    seq = get_insns ();
4345 	    end_sequence ();
4346 	    emit_insn_before (seq, tail_recursion_reentry);
4347 	    break;
4348 	  }
4349     }
4350 
4351   /* Possibly warn about unused parameters.
4352      When the front end does unit-at-a-time, the warning is already
4353      issued at finalization time.  */
4354   if (warn_unused_parameter
4355       && !lang_hooks.callgraph.expand_function)
4356     do_warn_unused_parameter (current_function_decl);
4357 
4358   /* End any sequences that failed to be closed due to syntax errors.  */
4359   while (in_sequence_p ())
4360     end_sequence ();
4361 
4362   clear_pending_stack_adjust ();
4363   do_pending_stack_adjust ();
4364 
4365   /* Mark the end of the function body.
4366      If control reaches this insn, the function can drop through
4367      without returning a value.  */
4368   emit_note (NOTE_INSN_FUNCTION_END);
4369 
4370   /* Must mark the last line number note in the function, so that the test
4371      coverage code can avoid counting the last line twice.  This just tells
4372      the code to ignore the immediately following line note, since there
4373      already exists a copy of this note somewhere above.  This line number
4374      note is still needed for debugging though, so we can't delete it.  */
4375   if (flag_test_coverage)
4376     emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4377 
4378   /* Output a linenumber for the end of the function.
4379      SDB depends on this.  */
4380   force_next_line_note ();
4381   emit_line_note (input_location);
4382 
4383   /* Before the return label (if any), clobber the return
4384      registers so that they are not propagated live to the rest of
4385      the function.  This can only happen with functions that drop
4386      through; if there had been a return statement, there would
4387      have either been a return rtx, or a jump to the return label.
4388 
4389      We delay the actual code generation until after
4390      current_function_return_rtx is computed.  */
4391   clobber_after = get_last_insn ();
4392 
4393   /* Output the label for the actual return from the function.  */
4394   emit_label (return_label);
4395 
4396   if (USING_SJLJ_EXCEPTIONS)
4397     {
4398       /* Let except.c know where it should emit the call to unregister
4399 	 the function context for sjlj exceptions.  */
4400       if (flag_exceptions)
4401 	sjlj_emit_function_exit_after (get_last_insn ());
4402     }
4403   else
4404     {
4405       /* @@@ This is a kludge.  We want to ensure that instructions that
4406 	 may trap are not moved into the epilogue by scheduling, because
4407 	 we don't always emit unwind information for the epilogue.
4408 	 However, not all machine descriptions define a blockage insn, so
4409 	 emit an ASM_INPUT to act as one.  */
4410       if (flag_non_call_exceptions)
4411 	emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4412     }
4413 
4414   /* If this is an implementation of throw, do what's necessary to
4415      communicate between __builtin_eh_return and the epilogue.  */
4416   expand_eh_return ();
4417 
4418   /* If scalar return value was computed in a pseudo-reg, or was a named
4419      return value that got dumped to the stack, copy that to the hard
4420      return register.  */
4421   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4422     {
4423       tree decl_result = DECL_RESULT (current_function_decl);
4424       rtx decl_rtl = DECL_RTL (decl_result);
4425 
4426       if (REG_P (decl_rtl)
4427 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4428 	  : DECL_REGISTER (decl_result))
4429 	{
4430 	  rtx real_decl_rtl = current_function_return_rtx;
4431 
4432 	  /* This should be set in assign_parms.  */
4433 	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4434 
4435 	  /* If this is a BLKmode structure being returned in registers,
4436 	     then use the mode computed in expand_return.  Note that if
4437 	     decl_rtl is memory, then its mode may have been changed,
4438 	     but that current_function_return_rtx has not.  */
4439 	  if (GET_MODE (real_decl_rtl) == BLKmode)
4440 	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4441 
4442 	  /* If a non-BLKmode return value should be padded at the least
4443 	     significant end of the register, shift it left by the appropriate
4444 	     amount.  BLKmode results are handled using the group load/store
4445 	     machinery.  */
4446 	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4447 	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4448 	    {
4449 	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4450 					   REGNO (real_decl_rtl)),
4451 			      decl_rtl);
4452 	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4453 	    }
4454 	  /* If a named return value dumped decl_return to memory, then
4455 	     we may need to re-do the PROMOTE_MODE signed/unsigned
4456 	     extension.  */
4457 	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4458 	    {
4459 	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4460 
4461 	      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4462 		promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4463 			      &unsignedp, 1);
4464 
4465 	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
4466 	    }
4467 	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
4468 	    {
4469 	      /* If expand_function_start has created a PARALLEL for decl_rtl,
4470 		 move the result to the real return registers.  Otherwise, do
4471 		 a group load from decl_rtl for a named return.  */
4472 	      if (GET_CODE (decl_rtl) == PARALLEL)
4473 		emit_group_move (real_decl_rtl, decl_rtl);
4474 	      else
4475 		emit_group_load (real_decl_rtl, decl_rtl,
4476 				 TREE_TYPE (decl_result),
4477 				 int_size_in_bytes (TREE_TYPE (decl_result)));
4478 	    }
4479 	  /* In the case of complex integer modes smaller than a word, we'll
4480 	     need to generate some non-trivial bitfield insertions.  Do that
4481 	     on a pseudo and not the hard register.  */
4482 	  else if (GET_CODE (decl_rtl) == CONCAT
4483 		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4484 		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4485 	    {
4486 	      int old_generating_concat_p;
4487 	      rtx tmp;
4488 
4489 	      old_generating_concat_p = generating_concat_p;
4490 	      generating_concat_p = 0;
4491 	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4492 	      generating_concat_p = old_generating_concat_p;
4493 
4494 	      emit_move_insn (tmp, decl_rtl);
4495 	      emit_move_insn (real_decl_rtl, tmp);
4496 	    }
4497 	  else
4498 	    emit_move_insn (real_decl_rtl, decl_rtl);
4499 	}
4500     }
4501 
4502   /* If returning a structure, arrange to return the address of the value
4503      in a place where debuggers expect to find it.
4504 
4505      If returning a structure PCC style,
4506      the caller also depends on this value.
4507      And current_function_returns_pcc_struct is not necessarily set.  */
4508   if (current_function_returns_struct
4509       || current_function_returns_pcc_struct)
4510     {
4511       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4512       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4513       rtx outgoing;
4514 
4515       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4516 	type = TREE_TYPE (type);
4517       else
4518 	value_address = XEXP (value_address, 0);
4519 
4520       outgoing = targetm.calls.function_value (build_pointer_type (type),
4521 					       current_function_decl, true);
4522 
4523       /* Mark this as a function return value so integrate will delete the
4524 	 assignment and USE below when inlining this function.  */
4525       REG_FUNCTION_VALUE_P (outgoing) = 1;
4526 
4527       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
4528       value_address = convert_memory_address (GET_MODE (outgoing),
4529 					      value_address);
4530 
4531       emit_move_insn (outgoing, value_address);
4532 
4533       /* Show return register used to hold result (in this case the address
4534 	 of the result).  */
4535       current_function_return_rtx = outgoing;
4536     }
4537 
4538   /* Emit the actual code to clobber return register.  */
4539   {
4540     rtx seq;
4541 
4542     start_sequence ();
4543     clobber_return_register ();
4544     expand_naked_return ();
4545     seq = get_insns ();
4546     end_sequence ();
4547 
4548     emit_insn_after (seq, clobber_after);
4549   }
4550 
4551   /* Output the label for the naked return from the function.  */
4552   emit_label (naked_return_label);
4553 
4554   /* If stack protection is enabled for this function, check the guard.  */
4555   if (cfun->stack_protect_guard)
4556     stack_protect_epilogue ();
4557 
4558   /* If we had calls to alloca, and this machine needs
4559      an accurate stack pointer to exit the function,
4560      insert some code to save and restore the stack pointer.  */
4561   if (! EXIT_IGNORE_STACK
4562       && current_function_calls_alloca)
4563     {
4564       rtx tem = 0;
4565 
4566       emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4567       emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4568     }
4569 
4570   /* ??? This should no longer be necessary since stupid is no longer with
4571      us, but there are some parts of the compiler (e.g. reload_combine and
4572      sh mach_dep_reorg) that still try to compute their own lifetime info
4573      instead of using the general framework.  */
4574   use_return_register ();
4575 }
4576 
4577 rtx
4578 get_arg_pointer_save_area (struct function *f)
4579 {
4580   rtx ret = f->x_arg_pointer_save_area;
4581 
4582   if (! ret)
4583     {
4584       ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4585       f->x_arg_pointer_save_area = ret;
4586     }
4587 
4588   if (f == cfun && ! f->arg_pointer_save_area_init)
4589     {
4590       rtx seq;
4591 
4592       /* Save the arg pointer at the beginning of the function.  The
4593 	 generated stack slot may not be a valid memory address, so we
4594 	 have to check it and fix it if necessary.  */
4595       start_sequence ();
4596       emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4597       seq = get_insns ();
4598       end_sequence ();
4599 
4600       push_topmost_sequence ();
4601       emit_insn_after (seq, entry_of_function ());
4602       pop_topmost_sequence ();
4603     }
4604 
4605   return ret;
4606 }
4607 
4608 /* Extend a vector that records the INSN_UIDs of INSNS
4609    (a list of one or more insns).  */
4610 
4611 static void
4612 record_insns (rtx insns, VEC(int,heap) **vecp)
4613 {
4614   rtx tmp;
4615 
4616   for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4617     VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4618 }
4619 
4620 /* Set the locator of the insn chain starting at INSN to LOC.  */
4621 static void
4622 set_insn_locators (rtx insn, int loc)
4623 {
4624   while (insn != NULL_RTX)
4625     {
4626       if (INSN_P (insn))
4627 	INSN_LOCATOR (insn) = loc;
4628       insn = NEXT_INSN (insn);
4629     }
4630 }
4631 
4632 /* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
4633    be running after reorg, SEQUENCE rtl is possible.  */
4634 
4635 static int
4636 contains (rtx insn, VEC(int,heap) **vec)
4637 {
4638   int i, j;
4639 
4640   if (NONJUMP_INSN_P (insn)
4641       && GET_CODE (PATTERN (insn)) == SEQUENCE)
4642     {
4643       int count = 0;
4644       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4645 	for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4646 	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4647 	      == VEC_index (int, *vec, j))
4648 	    count++;
4649       return count;
4650     }
4651   else
4652     {
4653       for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4654 	if (INSN_UID (insn) == VEC_index (int, *vec, j))
4655 	  return 1;
4656     }
4657   return 0;
4658 }
4659 
4660 int
4661 prologue_epilogue_contains (rtx insn)
4662 {
4663   if (contains (insn, &prologue))
4664     return 1;
4665   if (contains (insn, &epilogue))
4666     return 1;
4667   return 0;
4668 }
4669 
4670 int
4671 sibcall_epilogue_contains (rtx insn)
4672 {
4673   if (sibcall_epilogue)
4674     return contains (insn, &sibcall_epilogue);
4675   return 0;
4676 }
4677 
4678 #ifdef HAVE_return
4679 /* Insert gen_return at the end of block BB.  This also means updating
4680    block_for_insn appropriately.  */
4681 
4682 static void
4683 emit_return_into_block (basic_block bb, rtx line_note)
4684 {
4685   emit_jump_insn_after (gen_return (), BB_END (bb));
4686   if (line_note)
4687     emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4688 }
4689 #endif /* HAVE_return */
4690 
4691 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4692 
4693 /* These functions convert the epilogue into a variant that does not
4694    modify the stack pointer.  This is used in cases where a function
4695    returns an object whose size is not known until it is computed.
4696    The called function leaves the object on the stack, leaves the
4697    stack depressed, and returns a pointer to the object.
4698 
4699    What we need to do is track all modifications and references to the
4700    stack pointer, deleting the modifications and changing the
4701    references to point to the location the stack pointer would have
4702    pointed to had the modifications taken place.
4703 
4704    These functions need to be portable so we need to make as few
4705    assumptions about the epilogue as we can.  However, the epilogue
4706    basically contains three things: instructions to reset the stack
4707    pointer, instructions to reload registers, possibly including the
4708    frame pointer, and an instruction to return to the caller.
4709 
4710    We must be sure of what a relevant epilogue insn is doing.  We also
4711    make no attempt to validate the insns we make since if they are
4712    invalid, we probably can't do anything valid.  The intent is that
4713    these routines get "smarter" as more and more machines start to use
4714    them and they try operating on different epilogues.
4715 
4716    We use the following structure to track what the part of the
4717    epilogue that we've already processed has done.  We keep two copies
4718    of the SP equivalence, one for use during the insn we are
4719    processing and one for use in the next insn.  The difference is
4720    because one part of a PARALLEL may adjust SP and the other may use
4721    it.  */
4722 
4723 struct epi_info
4724 {
4725   rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
4726   HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
4727   rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
4728   HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
4729   rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
4730 				   should be set to once we no longer need
4731 				   its value.  */
4732   rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4733 					     for registers.  */
4734 };
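
/* Illustrative walk-through (hypothetical two-insn epilogue, not taken
   from any particular port):

	(set (reg sp) (plus (reg fp) (const_int 8)))
	(set (reg fp) (mem (reg sp)))

   The first SET is not emitted; it merely records SP_EQUIV_REG = fp and
   SP_OFFSET = 8.  The second SET targets that equivalence register, so it
   becomes a pending load of (mem (plus (reg fp) (const_int 8))) in
   EQUIV_REG_SRC, emitted by emit_equiv_load once fp's old value is no
   longer needed.  */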
4735 
4736 static void handle_epilogue_set (rtx, struct epi_info *);
4737 static void update_epilogue_consts (rtx, rtx, void *);
4738 static void emit_equiv_load (struct epi_info *);
4739 
4740 /* Modify INSNS, a list of one or more insns that is part of the epilogue, so
4741    that it makes no modifications to the stack pointer.  Return the new insns.  */
4742 
4743 static rtx
4744 keep_stack_depressed (rtx insns)
4745 {
4746   int j;
4747   struct epi_info info;
4748   rtx insn, next;
4749 
4750   /* If the epilogue is just a single instruction, it must be OK as is.  */
4751   if (NEXT_INSN (insns) == NULL_RTX)
4752     return insns;
4753 
4754   /* Otherwise, start a sequence, initialize the information we have, and
4755      process all the insns we were given.  */
4756   start_sequence ();
4757 
4758   info.sp_equiv_reg = stack_pointer_rtx;
4759   info.sp_offset = 0;
4760   info.equiv_reg_src = 0;
4761 
4762   for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4763     info.const_equiv[j] = 0;
4764 
4765   insn = insns;
4766   next = NULL_RTX;
4767   while (insn != NULL_RTX)
4768     {
4769       next = NEXT_INSN (insn);
4770 
4771       if (!INSN_P (insn))
4772 	{
4773 	  add_insn (insn);
4774 	  insn = next;
4775 	  continue;
4776 	}
4777 
4778       /* If this insn references the register that SP is equivalent to and
4779 	 we have a pending load to that register, we must force out the load
4780 	 first and then indicate we no longer know what SP's equivalent is.  */
4781       if (info.equiv_reg_src != 0
4782 	  && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4783 	{
4784 	  emit_equiv_load (&info);
4785 	  info.sp_equiv_reg = 0;
4786 	}
4787 
4788       info.new_sp_equiv_reg = info.sp_equiv_reg;
4789       info.new_sp_offset = info.sp_offset;
4790 
4791       /* If this is a (RETURN) and the return address is on the stack,
4792 	 update the address and change to an indirect jump.  */
4793       if (GET_CODE (PATTERN (insn)) == RETURN
4794 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
4795 	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4796 	{
4797 	  rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4798 	  rtx base = 0;
4799 	  HOST_WIDE_INT offset = 0;
4800 	  rtx jump_insn, jump_set;
4801 
4802 	  /* If the return address is in a register, we can emit the insn
4803 	     unchanged.  Otherwise, it must be a MEM and we see what the
4804 	     base register and offset are.  In any case, we have to emit any
4805 	     pending load to the equivalent reg of SP, if any.  */
4806 	  if (REG_P (retaddr))
4807 	    {
4808 	      emit_equiv_load (&info);
4809 	      add_insn (insn);
4810 	      insn = next;
4811 	      continue;
4812 	    }
4813 	  else
4814 	    {
4815 	      rtx ret_ptr;
4816 	      gcc_assert (MEM_P (retaddr));
4817 
4818 	      ret_ptr = XEXP (retaddr, 0);
4819 
4820 	      if (REG_P (ret_ptr))
4821 		{
4822 		  base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4823 		  offset = 0;
4824 		}
4825 	      else
4826 		{
4827 		  gcc_assert (GET_CODE (ret_ptr) == PLUS
4828 			      && REG_P (XEXP (ret_ptr, 0))
4829 			      && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4830 		  base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4831 		  offset = INTVAL (XEXP (ret_ptr, 1));
4832 		}
4833 	    }
4834 
4835 	  /* If the base of the location containing the return pointer
4836 	     is SP, we must update it with the replacement address.  Otherwise,
4837 	     just build the necessary MEM.  */
4838 	  retaddr = plus_constant (base, offset);
4839 	  if (base == stack_pointer_rtx)
4840 	    retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4841 					    plus_constant (info.sp_equiv_reg,
4842 							   info.sp_offset));
4843 
4844 	  retaddr = gen_rtx_MEM (Pmode, retaddr);
4845 	  MEM_NOTRAP_P (retaddr) = 1;
4846 
4847 	  /* If there is a pending load to the equivalent register for SP
4848 	     and we reference that register, we must load our address into
4849 	     a scratch register and then do that load.  */
4850 	  if (info.equiv_reg_src
4851 	      && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4852 	    {
4853 	      unsigned int regno;
4854 	      rtx reg;
4855 
4856 	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4857 		if (HARD_REGNO_MODE_OK (regno, Pmode)
4858 		    && !fixed_regs[regno]
4859 		    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4860 		    && !REGNO_REG_SET_P
4861 		         (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4862 		    && !refers_to_regno_p (regno,
4863 					   regno + hard_regno_nregs[regno]
4864 								   [Pmode],
4865 					   info.equiv_reg_src, NULL)
4866 		    && info.const_equiv[regno] == 0)
4867 		  break;
4868 
4869 	      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4870 
4871 	      reg = gen_rtx_REG (Pmode, regno);
4872 	      emit_move_insn (reg, retaddr);
4873 	      retaddr = reg;
4874 	    }
4875 
4876 	  emit_equiv_load (&info);
4877 	  jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4878 
4879 	  /* Show that the SET in the above insn is a RETURN.  */
4880 	  jump_set = single_set (jump_insn);
4881 	  gcc_assert (jump_set);
4882 	  SET_IS_RETURN_P (jump_set) = 1;
4883 	}
4884 
4885       /* If SP is not mentioned in the pattern and its equivalent register, if
4886 	 any, is not modified, just emit the insn.  Otherwise, if neither SP nor
4887 	 its equivalent register is set, replace the references to SP and emit
4888 	 the insn.  If none of those cases applies, handle each SET individually.  */
4889       else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4890 	       && (info.sp_equiv_reg == stack_pointer_rtx
4891 		   || !reg_set_p (info.sp_equiv_reg, insn)))
4892 	add_insn (insn);
4893       else if (! reg_set_p (stack_pointer_rtx, insn)
4894 	       && (info.sp_equiv_reg == stack_pointer_rtx
4895 		   || !reg_set_p (info.sp_equiv_reg, insn)))
4896 	{
4897 	  int changed;
4898 
4899 	  changed = validate_replace_rtx (stack_pointer_rtx,
4900 					  plus_constant (info.sp_equiv_reg,
4901 							 info.sp_offset),
4902 					  insn);
4903 	  gcc_assert (changed);
4904 
4905 	  add_insn (insn);
4906 	}
4907       else if (GET_CODE (PATTERN (insn)) == SET)
4908 	handle_epilogue_set (PATTERN (insn), &info);
4909       else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4910 	{
4911 	  for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4912 	    if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4913 	      handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4914 	}
4915       else
4916 	add_insn (insn);
4917 
4918       info.sp_equiv_reg = info.new_sp_equiv_reg;
4919       info.sp_offset = info.new_sp_offset;
4920 
4921       /* Now update any constants this insn sets.  */
4922       note_stores (PATTERN (insn), update_epilogue_consts, &info);
4923       insn = next;
4924     }
4925 
4926   insns = get_insns ();
4927   end_sequence ();
4928   return insns;
4929 }
4930 
4931 /* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
4932    structure that contains information about what we've seen so far.  We
4933    process this SET by either updating that data or by emitting one or
4934    more insns.  */
4935 
4936 static void
4937 handle_epilogue_set (rtx set, struct epi_info *p)
4938 {
4939   /* First handle the case where we are setting SP.  Record what it is being
4940      set from, which we must be able to determine.  */
4941   if (reg_set_p (stack_pointer_rtx, set))
4942     {
4943       gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4944 
4945       if (GET_CODE (SET_SRC (set)) == PLUS)
4946 	{
4947 	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4948 	  if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4949 	    p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4950 	  else
4951 	    {
4952 	      gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4953 			  && (REGNO (XEXP (SET_SRC (set), 1))
4954 			      < FIRST_PSEUDO_REGISTER)
4955 			  && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4956 	      p->new_sp_offset
4957 		= INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4958 	    }
4959 	}
4960       else
4961 	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4962 
4963       /* If we are adjusting SP, we adjust from the old data.  */
4964       if (p->new_sp_equiv_reg == stack_pointer_rtx)
4965 	{
4966 	  p->new_sp_equiv_reg = p->sp_equiv_reg;
4967 	  p->new_sp_offset += p->sp_offset;
4968 	}
4969 
4970       gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4971 
4972       return;
4973     }
4974 
4975   /* Next handle the case where we are setting SP's equivalent
4976      register.  We must not already have a value to set it to.  We
4977      could update, but there seems little point in handling that case.
4978      Note that we have to allow for the case where we are setting the
4979      register set in the previous part of a PARALLEL inside a single
4980      insn.  But use the old offset for any updates within this insn.
4981      We must allow for the case where the register is being set in a
4982      different (usually wider) mode than Pmode.  */
4983   else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4984     {
4985       gcc_assert (!p->equiv_reg_src
4986 		  && REG_P (p->new_sp_equiv_reg)
4987 		  && REG_P (SET_DEST (set))
4988 		  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4989 		      <= BITS_PER_WORD)
4990 		  && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4991       p->equiv_reg_src
4992 	= simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4993 				plus_constant (p->sp_equiv_reg,
4994 					       p->sp_offset));
4995     }
4996 
4997   /* Otherwise, replace any references to SP in the insn to its new value
4998      and emit the insn.  */
4999   else
5000     {
5001       SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5002 					    plus_constant (p->sp_equiv_reg,
5003 							   p->sp_offset));
5004       SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5005 					     plus_constant (p->sp_equiv_reg,
5006 							    p->sp_offset));
5007       emit_insn (set);
5008     }
5009 }
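
/* Illustrative example of the SP-adjustment case above: with
   SP_EQUIV_REG = fp and SP_OFFSET = 8 already recorded, processing

	(set (reg sp) (plus (reg sp) (const_int 12)))

   records NEW_SP_EQUIV_REG = fp and NEW_SP_OFFSET = 20; nothing is
   emitted for the SET itself.  */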
5010 
5011 /* Update the tracking information for registers set to constants.  */
5012 
5013 static void
5014 update_epilogue_consts (rtx dest, rtx x, void *data)
5015 {
5016   struct epi_info *p = (struct epi_info *) data;
5017   rtx new;
5018 
5019   if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5020     return;
5021 
5022   /* If we are either clobbering a register or doing a partial set,
5023      show we don't know the value.  */
5024   else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5025     p->const_equiv[REGNO (dest)] = 0;
5026 
5027   /* If we are setting it to a constant, record that constant.  */
5028   else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5029     p->const_equiv[REGNO (dest)] = SET_SRC (x);
5030 
5031   /* If this is a binary operation between a register we have been tracking
5032      and a constant, see if we can compute a new constant value.  */
5033   else if (ARITHMETIC_P (SET_SRC (x))
5034 	   && REG_P (XEXP (SET_SRC (x), 0))
5035 	   && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5036 	   && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5037 	   && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5038 	   && 0 != (new = simplify_binary_operation
5039 		    (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5040 		     p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5041 		     XEXP (SET_SRC (x), 1)))
5042 	   && GET_CODE (new) == CONST_INT)
5043     p->const_equiv[REGNO (dest)] = new;
5044 
5045   /* Otherwise, we can't do anything with this value.  */
5046   else
5047     p->const_equiv[REGNO (dest)] = 0;
5048 }
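
/* Illustrative example (register numbers hypothetical): after
   (set (reg 2) (const_int 16)) we record const_equiv[2] = 16; a later
   (set (reg 3) (plus (reg 2) (const_int 4))) is then folded by
   simplify_binary_operation, recording const_equiv[3] = 20.  */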
5049 
5050 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */
5051 
5052 static void
5053 emit_equiv_load (struct epi_info *p)
5054 {
5055   if (p->equiv_reg_src != 0)
5056     {
5057       rtx dest = p->sp_equiv_reg;
5058 
5059       if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5060 	dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5061 			    REGNO (p->sp_equiv_reg));
5062 
5063       emit_move_insn (dest, p->equiv_reg_src);
5064       p->equiv_reg_src = 0;
5065     }
5066 }
5067 #endif
5068 
5069 /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
5070    this into place with notes indicating where the prologue ends and where
5071    the epilogue begins.  Update the basic block information when possible.  */
5072 
5073 void
5074 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5075 {
5076   int inserted = 0;
5077   edge e;
5078 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5079   rtx seq;
5080 #endif
5081 #ifdef HAVE_prologue
5082   rtx prologue_end = NULL_RTX;
5083 #endif
5084 #if defined (HAVE_epilogue) || defined(HAVE_return)
5085   rtx epilogue_end = NULL_RTX;
5086 #endif
5087   edge_iterator ei;
5088 
5089 #ifdef HAVE_prologue
5090   if (HAVE_prologue)
5091     {
5092       start_sequence ();
5093       seq = gen_prologue ();
5094       emit_insn (seq);
5095 
5096       /* Retain a map of the prologue insns.  */
5097       record_insns (seq, &prologue);
5098       prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5099 
5100       seq = get_insns ();
5101       end_sequence ();
5102       set_insn_locators (seq, prologue_locator);
5103 
5104       /* Can't deal with multiple successors of the entry block
5105          at the moment.  Function should always have at least one
5106          entry point.  */
5107       gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5108 
5109       insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5110       inserted = 1;
5111     }
5112 #endif
5113 
5114   /* If the exit block has no non-fake predecessors, we don't need
5115      an epilogue.  */
5116   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5117     if ((e->flags & EDGE_FAKE) == 0)
5118       break;
5119   if (e == NULL)
5120     goto epilogue_done;
5121 
5122 #ifdef HAVE_return
5123   if (optimize && HAVE_return)
5124     {
5125       /* If we're allowed to generate a simple return instruction,
5126 	 then by definition we don't need a full epilogue.  Examine
5127 	 the block that falls through to EXIT.   If it does not
5128 	 contain any code, examine its predecessors and try to
5129 	 emit (conditional) return instructions.  */
5130 
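      /* Illustrative example: a predecessor block ending in an
	 unconditional (jump_insn (set (pc) (label_ref L))), where L labels
	 the otherwise empty exit fallthru block, gets that jump replaced by
	 a (return) insn and its edge redirected straight to EXIT.  */
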
5131       basic_block last;
5132       rtx label;
5133 
5134       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5135 	if (e->flags & EDGE_FALLTHRU)
5136 	  break;
5137       if (e == NULL)
5138 	goto epilogue_done;
5139       last = e->src;
5140 
5141       /* Verify that there are no active instructions in the last block.  */
5142       label = BB_END (last);
5143       while (label && !LABEL_P (label))
5144 	{
5145 	  if (active_insn_p (label))
5146 	    break;
5147 	  label = PREV_INSN (label);
5148 	}
5149 
5150       if (BB_HEAD (last) == label && LABEL_P (label))
5151 	{
5152 	  edge_iterator ei2;
5153 	  rtx epilogue_line_note = NULL_RTX;
5154 
5155 	  /* Locate the line number associated with the closing brace,
5156 	     if we can find one.  */
5157 	  for (seq = get_last_insn ();
5158 	       seq && ! active_insn_p (seq);
5159 	       seq = PREV_INSN (seq))
5160 	    if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5161 	      {
5162 		epilogue_line_note = seq;
5163 		break;
5164 	      }
5165 
5166 	  for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5167 	    {
5168 	      basic_block bb = e->src;
5169 	      rtx jump;
5170 
5171 	      if (bb == ENTRY_BLOCK_PTR)
5172 		{
5173 		  ei_next (&ei2);
5174 		  continue;
5175 		}
5176 
5177 	      jump = BB_END (bb);
5178 	      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5179 		{
5180 		  ei_next (&ei2);
5181 		  continue;
5182 		}
5183 
5184 	      /* If we have an unconditional jump, we can replace that
5185 		 with a simple return instruction.  */
5186 	      if (simplejump_p (jump))
5187 		{
5188 		  emit_return_into_block (bb, epilogue_line_note);
5189 		  delete_insn (jump);
5190 		}
5191 
5192 	      /* If we have a conditional jump, we can try to replace
5193 		 that with a conditional return instruction.  */
5194 	      else if (condjump_p (jump))
5195 		{
5196 		  if (! redirect_jump (jump, 0, 0))
5197 		    {
5198 		      ei_next (&ei2);
5199 		      continue;
5200 		    }
5201 
5202 		  /* If this block has only one successor, it both jumps
5203 		     and falls through to the fallthru block, so we can't
5204 		     delete the edge.  */
5205 		  if (single_succ_p (bb))
5206 		    {
5207 		      ei_next (&ei2);
5208 		      continue;
5209 		    }
5210 		}
5211 	      else
5212 		{
5213 		  ei_next (&ei2);
5214 		  continue;
5215 		}
5216 
5217 	      /* Fix up the CFG for the successful change we just made.  */
5218 	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
5219 	    }
5220 
5221 	  /* Emit a return insn for the exit fallthru block.  Whether
5222 	     this is still reachable will be determined later.  */
5223 
5224 	  emit_barrier_after (BB_END (last));
5225 	  emit_return_into_block (last, epilogue_line_note);
5226 	  epilogue_end = BB_END (last);
5227 	  single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5228 	  goto epilogue_done;
5229 	}
5230     }
5231 #endif
5232   /* Find the edge that falls through to EXIT.  Other edges may exist
5233      due to RETURN instructions, but those don't need epilogues.
5234      There really shouldn't be a mixture -- either all should have
5235      been converted or none, however...  */
5236 
5237   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5238     if (e->flags & EDGE_FALLTHRU)
5239       break;
5240   if (e == NULL)
5241     goto epilogue_done;
5242 
5243 #ifdef HAVE_epilogue
5244   if (HAVE_epilogue)
5245     {
5246       start_sequence ();
5247       epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5248 
5249       seq = gen_epilogue ();
5250 
5251 #ifdef INCOMING_RETURN_ADDR_RTX
5252       /* If this function returns with the stack depressed and we can support
5253 	 it, massage the epilogue to actually do that.  */
5254       if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5255 	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5256 	seq = keep_stack_depressed (seq);
5257 #endif
5258 
5259       emit_jump_insn (seq);
5260 
5261       /* Retain a map of the epilogue insns.  */
5262       record_insns (seq, &epilogue);
5263       set_insn_locators (seq, epilogue_locator);
5264 
5265       seq = get_insns ();
5266       end_sequence ();
5267 
5268       insert_insn_on_edge (seq, e);
5269       inserted = 1;
5270     }
5271   else
5272 #endif
5273     {
5274       basic_block cur_bb;
5275 
5276       if (! next_active_insn (BB_END (e->src)))
5277 	goto epilogue_done;
5278       /* We have a fall-through edge to the exit block, the source is not
5279          at the end of the function, and there will be an assembler epilogue
5280          at the end of the function.
5281          We can't use force_nonfallthru here, because that would try to
5282          use return.  Inserting a jump 'by hand' is extremely messy, so
5283 	 we take advantage of cfg_layout_finalize using
5284 	 fixup_fallthru_exit_predecessor.  */
5285       cfg_layout_initialize (0);
5286       FOR_EACH_BB (cur_bb)
5287 	if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
5288 	  cur_bb->aux = cur_bb->next_bb;
5289       cfg_layout_finalize ();
5290     }
5291 epilogue_done:
5292 
5293   if (inserted)
5294     commit_edge_insertions ();
5295 
5296 #ifdef HAVE_sibcall_epilogue
5297   /* Emit sibling epilogues before any sibling call sites.  */
5298   for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5299     {
5300       basic_block bb = e->src;
5301       rtx insn = BB_END (bb);
5302 
5303       if (!CALL_P (insn)
5304 	  || ! SIBLING_CALL_P (insn))
5305 	{
5306 	  ei_next (&ei);
5307 	  continue;
5308 	}
5309 
5310       start_sequence ();
5311       emit_insn (gen_sibcall_epilogue ());
5312       seq = get_insns ();
5313       end_sequence ();
5314 
5315       /* Retain a map of the epilogue insns.  Used in life analysis to
5316 	 avoid getting rid of sibcall epilogue insns.  Do this before we
5317 	 actually emit the sequence.  */
5318       record_insns (seq, &sibcall_epilogue);
5319       set_insn_locators (seq, epilogue_locator);
5320 
5321       emit_insn_before (seq, insn);
5322       ei_next (&ei);
5323     }
5324 #endif
5325 
5326 #ifdef HAVE_prologue
5327   /* This is probably all useless now that we use locators.  */
5328   if (prologue_end)
5329     {
5330       rtx insn, prev;
5331 
5332       /* GDB handles `break f' by setting a breakpoint on the first
5333 	 line note after the prologue.  Which means (1) that if
5334 	 there are line number notes before where we inserted the
5335 	 prologue we should move them, and (2) we should generate a
5336 	 note before the end of the first basic block, if there isn't
5337 	 one already there.
5338 
5339 	 ??? This behavior is completely broken when dealing with
5340 	 multiple entry functions.  We simply always place the note
5341 	 into the first basic block and let alternate entry points
5342 	 be missed.
5343        */
5344 
5345       for (insn = prologue_end; insn; insn = prev)
5346 	{
5347 	  prev = PREV_INSN (insn);
5348 	  if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5349 	    {
5350 	      /* Note that we cannot reorder the first insn in the
5351 		 chain, since rest_of_compilation relies on that
5352 		 remaining constant.  */
5353 	      if (prev == NULL)
5354 		break;
5355 	      reorder_insns (insn, insn, prologue_end);
5356 	    }
5357 	}
5358 
5359       /* Find the last line number note in the first block.  */
5360       for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5361 	   insn != prologue_end && insn;
5362 	   insn = PREV_INSN (insn))
5363 	if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5364 	  break;
5365 
5366       /* If we didn't find one, make a copy of the first line number
5367 	 we run across.  */
5368       if (! insn)
5369 	{
5370 	  for (insn = next_active_insn (prologue_end);
5371 	       insn;
5372 	       insn = PREV_INSN (insn))
5373 	    if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5374 	      {
5375 		emit_note_copy_after (insn, prologue_end);
5376 		break;
5377 	      }
5378 	}
5379     }
5380 #endif
5381 #ifdef HAVE_epilogue
5382   if (epilogue_end)
5383     {
5384       rtx insn, next;
5385 
5386       /* Similarly, move any line notes that appear after the epilogue.
5387          There is no need, however, to be quite so anal about the existence
5388 	 of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
5389 	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5390 	 info generation.  */
5391       for (insn = epilogue_end; insn; insn = next)
5392 	{
5393 	  next = NEXT_INSN (insn);
5394 	  if (NOTE_P (insn)
5395 	      && (NOTE_LINE_NUMBER (insn) > 0
5396 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5397 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5398 	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5399 	}
5400     }
5401 #endif
5402 }
5403 
5404 /* Reposition the prologue-end and epilogue-begin notes after instruction
5405    scheduling and delayed branch scheduling.  */
5406 
5407 void
5408 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5409 {
5410 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5411   rtx insn, last, note;
5412   int len;
5413 
5414   if ((len = VEC_length (int, prologue)) > 0)
5415     {
5416       last = 0, note = 0;
5417 
5418       /* Scan from the beginning until we reach the last prologue insn.
5419 	 We apparently can't depend on basic_block_{head,end} after
5420 	 reorg has run.  */
5421       for (insn = f; insn; insn = NEXT_INSN (insn))
5422 	{
5423 	  if (NOTE_P (insn))
5424 	    {
5425 	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5426 		note = insn;
5427 	    }
5428 	  else if (contains (insn, &prologue))
5429 	    {
5430 	      last = insn;
5431 	      if (--len == 0)
5432 		break;
5433 	    }
5434 	}
5435 
5436       if (last)
5437 	{
5438 	  /* Find the prologue-end note if we haven't already, and
5439 	     move it to just after the last prologue insn.  */
5440 	  if (note == 0)
5441 	    {
5442 	      for (note = last; (note = NEXT_INSN (note));)
5443 		if (NOTE_P (note)
5444 		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5445 		  break;
5446 	    }
5447 
5448 	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
5449 	  if (LABEL_P (last))
5450 	    last = NEXT_INSN (last);
5451 	  reorder_insns (note, note, last);
5452 	}
5453     }
5454 
5455   if ((len = VEC_length (int, epilogue)) > 0)
5456     {
5457       last = 0, note = 0;
5458 
5459       /* Scan from the end until we reach the first epilogue insn.
5460 	 We apparently can't depend on basic_block_{head,end} after
5461 	 reorg has run.  */
5462       for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5463 	{
5464 	  if (NOTE_P (insn))
5465 	    {
5466 	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5467 		note = insn;
5468 	    }
5469 	  else if (contains (insn, &epilogue))
5470 	    {
5471 	      last = insn;
5472 	      if (--len == 0)
5473 		break;
5474 	    }
5475 	}
5476 
5477       if (last)
5478 	{
5479 	  /* Find the epilogue-begin note if we haven't already, and
5480 	     move it to just before the first epilogue insn.  */
5481 	  if (note == 0)
5482 	    {
5483 	      for (note = insn; (note = PREV_INSN (note));)
5484 		if (NOTE_P (note)
5485 		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5486 		  break;
5487 	    }
5488 
5489 	  if (PREV_INSN (last) != note)
5490 	    reorder_insns (note, note, PREV_INSN (last));
5491 	}
5492     }
5493 #endif /* HAVE_prologue or HAVE_epilogue */
5494 }
5495 
5496 /* Resets the ib_boundaries_block array.  */
5497 
5498 void
5499 reset_block_changes (void)
5500 {
5501   VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5502   VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5503 }
5504 
5505 /* Record the boundary for BLOCK.  */
5506 void
5507 record_block_change (tree block)
5508 {
5509   int i, n;
5510   tree last_block;
5511 
5512   if (!block)
5513     return;
5514 
5515   if (!cfun->ib_boundaries_block)
5516     return;
5517 
5518   last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5519   VARRAY_POP (cfun->ib_boundaries_block);
5520   n = get_max_uid ();
5521   for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5522     VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5523 
5524   VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5525 }
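
/* Roughly (illustrative): every insn UID generated since the previous
   boundary is mapped to the previously recorded block, and BLOCK becomes
   the pending entry for the insns that follow; check_block_change later
   indexes ib_boundaries_block by INSN_UID to recover this mapping.  */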
5526 
5527 /* Finishes the record of block boundaries.  */
5528 void finalize_block_changes (void)
5529 {
5530   record_block_change (DECL_INITIAL (current_function_decl));
5531 }
5532 
5533 /* For INSN, store in *BLOCK the BLOCK it belongs to, if known.  */
5534 void
5535 check_block_change (rtx insn, tree *block)
5536 {
5537   unsigned uid = INSN_UID (insn);
5538 
5539   if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5540     return;
5541 
5542   *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
5543 }
5544 
5545 /* Releases the ib_boundaries_block records.  */
5546 void
5547 free_block_changes (void)
5548 {
5549   cfun->ib_boundaries_block = NULL;
5550 }
5551 
5552 /* Returns the name of the current function.  */
5553 const char *
5554 current_function_name (void)
5555 {
5556   return lang_hooks.decl_printable_name (cfun->decl, 2);
5557 }
5558 
5559 
5560 static void
5561 rest_of_handle_check_leaf_regs (void)
5562 {
5563 #ifdef LEAF_REGISTERS
5564   current_function_uses_only_leaf_regs
5565     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5566 #endif
5567 }
5568 
5569 struct tree_opt_pass pass_leaf_regs =
5570 {
5571   NULL,                                 /* name */
5572   NULL,                                 /* gate */
5573   rest_of_handle_check_leaf_regs,       /* execute */
5574   NULL,                                 /* sub */
5575   NULL,                                 /* next */
5576   0,                                    /* static_pass_number */
5577   0,                                    /* tv_id */
5578   0,                                    /* properties_required */
5579   0,                                    /* properties_provided */
5580   0,                                    /* properties_destroyed */
5581   0,                                    /* todo_flags_start */
5582   0,                                    /* todo_flags_finish */
5583   0                                     /* letter */
5584 };
5585 
5586 
5587 #include "gt-function.h"
5588 
5589 /* begin-TIGCC-local (regparms): explicit register specification for parameters */
5590 /* Return 1 if an argument for the current function was passed in
5591    register REGNO.  */
5592 
5593 int
5594 function_arg_regno_p (int regno)
5595 {
5596   tree parm = DECL_ARGUMENTS (current_function_decl);
5597   for (; parm; parm = TREE_CHAIN (parm))
5598     {
5599       rtx incoming = DECL_INCOMING_RTL (parm);
5600       if (GET_CODE (incoming) == REG)
5601 	{
5602 	  int incoming_reg;
5603 	  incoming_reg = REGNO (incoming);
5604 	  if (regno >= incoming_reg &&
5605 	      regno < incoming_reg + HARD_REGNO_NREGS (incoming_reg,
5606 						       GET_MODE (incoming)))
5607 	    return 1;
5608 	}
5609     }
5610   return 0;
5611 }
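
/* Illustrative use (hypothetical): a machine-dependent reorg pass could
   call function_arg_regno_p (0) to check whether hard register 0 carries
   an explicitly register-declared parameter and therefore must be treated
   as live on function entry.  */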
5612 /* end-TIGCC-local (regparms) */
5613