1 /* Expands front end tree to back end RTL for GCC.
2    Copyright (C) 1987-2021 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file handles the generation of rtl code from tree structure
21    at the level of the function as a whole.
22    It creates the rtl expressions for parameters and auto variables
23    and has full responsibility for allocating stack slots.
24 
25    `expand_function_start' is called at the beginning of a function,
26    before the function body is parsed, and `expand_function_end' is
27    called after parsing the body.
28 
29    Call `assign_stack_local' to allocate a stack slot for a local variable.
30    This is usually done during the RTL generation for the function body,
31    but it can also be done in the reload pass when a pseudo-register does
32    not get a hard register.  */
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "memmodel.h"
45 #include "tm_p.h"
46 #include "stringpool.h"
47 #include "expmed.h"
48 #include "optabs.h"
49 #include "opts.h"
50 #include "regs.h"
51 #include "emit-rtl.h"
52 #include "recog.h"
53 #include "rtl-error.h"
54 #include "hard-reg-set.h"
55 #include "alias.h"
56 #include "fold-const.h"
57 #include "stor-layout.h"
58 #include "varasm.h"
59 #include "except.h"
60 #include "dojump.h"
61 #include "explow.h"
62 #include "calls.h"
63 #include "expr.h"
64 #include "optabs-tree.h"
65 #include "output.h"
66 #include "langhooks.h"
67 #include "common/common-target.h"
68 #include "gimplify.h"
69 #include "tree-pass.h"
70 #include "cfgrtl.h"
71 #include "cfganal.h"
72 #include "cfgbuild.h"
73 #include "cfgcleanup.h"
74 #include "cfgexpand.h"
75 #include "shrink-wrap.h"
76 #include "toplev.h"
77 #include "rtl-iter.h"
78 #include "tree-dfa.h"
79 #include "tree-ssa.h"
80 #include "stringpool.h"
81 #include "attribs.h"
82 #include "gimple.h"
83 #include "options.h"
84 #include "function-abi.h"
85 
86 /* So we can assign to cfun in this file.  */
87 #undef cfun
88 
89 #ifndef STACK_ALIGNMENT_NEEDED
90 #define STACK_ALIGNMENT_NEEDED 1
91 #endif
92 
93 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
94 
95 /* Round a value down to the nearest multiple of the required alignment,
96    which must be a power of two.  Avoid using division in case the value
97    is negative.  */
98 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
99 
100 /* Similar, but round to the next highest integer that meets the
101    alignment.  */
102 #define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
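/* For example, with ALIGN == 16, FLOOR_ROUND (37, 16) yields 32 and
   CEIL_ROUND (37, 16) yields 48; both reduce to simple masking because
   the alignment is a power of two.  */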
103 
104 /* Nonzero once virtual register instantiation has been done.
105    assign_stack_local uses frame_pointer_rtx when this is nonzero.
106    calls.c:emit_library_call_value_1 uses it to set up
107    post-instantiation libcalls.  */
108 int virtuals_instantiated;
109 
110 /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
111 static GTY(()) int funcdef_no;
112 
113 /* These variables hold pointers to functions to create and destroy
114    target specific, per-function data structures.  */
115 struct machine_function * (*init_machine_status) (void);
116 
117 /* The currently compiled function.  */
118 struct function *cfun = 0;
119 
120 /* These hashes record the prologue and epilogue insns.  */
121 
122 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
123 {
124   static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
125   static bool equal (rtx a, rtx b) { return a == b; }
126 };
127 
128 static GTY((cache))
129   hash_table<insn_cache_hasher> *prologue_insn_hash;
130 static GTY((cache))
131   hash_table<insn_cache_hasher> *epilogue_insn_hash;
132 
133 
134 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
135 vec<tree, va_gc> *types_used_by_cur_var_decl;
136 
137 /* Forward declarations.  */
138 
139 static class temp_slot *find_temp_slot_from_address (rtx);
140 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
141 static void pad_below (struct args_size *, machine_mode, tree);
142 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
143 static int all_blocks (tree, tree *);
144 static tree *get_block_vector (tree, int *);
145 extern tree debug_find_var_in_block_tree (tree, tree);
146 /* We always define `record_insns' even if it's not used so that we
147    can always export `prologue_epilogue_contains'.  */
148 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
149      ATTRIBUTE_UNUSED;
150 static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
151 static void prepare_function_start (void);
152 static void do_clobber_return_reg (rtx, void *);
153 static void do_use_return_reg (rtx, void *);
154 
155 
156 /* Stack of nested functions.  */
157 /* Keep track of the cfun stack.  */
158 
159 static vec<function *> function_context_stack;
160 
161 /* Save the current context for compilation of a nested function.
162    This is called from language-specific code.  */
163 
164 void
165 push_function_context (void)
166 {
167   if (cfun == 0)
168     allocate_struct_function (NULL, false);
169 
170   function_context_stack.safe_push (cfun);
171   set_cfun (NULL);
172 }
173 
174 /* Restore the last saved context, at the end of a nested function.
175    This function is called from language-specific code.  */
176 
177 void
178 pop_function_context (void)
179 {
180   struct function *p = function_context_stack.pop ();
181   set_cfun (p);
182   current_function_decl = p->decl;
183 
184   /* Reset variables that have known state during rtx generation.  */
185   virtuals_instantiated = 0;
186   generating_concat_p = 1;
187 }
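/* As an illustrative sketch (not lifted from any particular front end),
   a front end that must compile a nested function while its containing
   function is still in progress would typically bracket the work like so:

     push_function_context ();
     ... allocate and expand the nested function ...
     pop_function_context ();

   which restores cfun, current_function_decl and the related per-function
   state for the enclosing function afterwards.  */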
188 
189 /* Clear out all parts of the state in F that can safely be discarded
190    after the function has been parsed, but not compiled, to let
191    garbage collection reclaim the memory.  */
192 
193 void
194 free_after_parsing (struct function *f)
195 {
196   f->language = 0;
197 }
198 
199 /* Clear out all parts of the state in F that can safely be discarded
200    after the function has been compiled, to let garbage collection
201    reclaim the memory.  */
202 
203 void
204 free_after_compilation (struct function *f)
205 {
206   prologue_insn_hash = NULL;
207   epilogue_insn_hash = NULL;
208 
209   free (crtl->emit.regno_pointer_align);
210 
211   memset (crtl, 0, sizeof (struct rtl_data));
212   f->eh = NULL;
213   f->machine = NULL;
214   f->cfg = NULL;
215   f->curr_properties &= ~PROP_cfg;
216 
217   regno_reg_rtx = NULL;
218 }
219 
220 /* Return size needed for stack frame based on slots so far allocated.
221    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
222    the caller may have to do that.  */
223 
224 poly_int64
225 get_frame_size (void)
226 {
227   if (FRAME_GROWS_DOWNWARD)
228     return -frame_offset;
229   else
230     return frame_offset;
231 }
232 
233 /* Issue an error message and return TRUE if frame OFFSET overflows in
234    the signed target pointer arithmetic for function FUNC.  Otherwise
235    return FALSE.  */
236 
237 bool
238 frame_offset_overflow (poly_int64 offset, tree func)
239 {
240   poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
241   unsigned HOST_WIDE_INT limit
242     = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
243        /* Leave room for the fixed part of the frame.  */
244        - 64 * UNITS_PER_WORD);
245 
246   if (!coeffs_in_range_p (size, 0U, limit))
247     {
248       unsigned HOST_WIDE_INT hwisize;
249       if (size.is_constant (&hwisize))
250 	error_at (DECL_SOURCE_LOCATION (func),
251 		  "total size of local objects %wu exceeds maximum %wu",
252 		  hwisize, limit);
253       else
254 	error_at (DECL_SOURCE_LOCATION (func),
255 		  "total size of local objects exceeds maximum %wu",
256 		  limit);
257       return true;
258     }
259 
260   return false;
261 }
262 
263 /* Return the minimum spill slot alignment for a register of mode MODE.  */
264 
265 unsigned int
266 spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
267 {
268   return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
269 }
270 
271 /* Return stack slot alignment in bits for TYPE and MODE.  */
272 
273 static unsigned int
274 get_stack_local_alignment (tree type, machine_mode mode)
275 {
276   unsigned int alignment;
277 
278   if (mode == BLKmode)
279     alignment = BIGGEST_ALIGNMENT;
280   else
281     alignment = GET_MODE_ALIGNMENT (mode);
282 
283   /* Allow the front end to (possibly) increase the alignment of this
284      stack slot.  */
285   if (! type)
286     type = lang_hooks.types.type_for_mode (mode, 0);
287 
288   return STACK_SLOT_ALIGNMENT (type, mode, alignment);
289 }
290 
291 /* Determine whether it is possible to fit a stack slot of size SIZE and
292    alignment ALIGNMENT into an area in the stack frame that starts at
293    frame offset START and has a length of LENGTH.  If so, store the frame
294    offset to be used for the stack slot in *POFFSET and return true;
295    return false otherwise.  This function will extend the frame size when
296    given a start/length pair that lies at the end of the frame.  */
297 
298 static bool
299 try_fit_stack_local (poly_int64 start, poly_int64 length,
300 		     poly_int64 size, unsigned int alignment,
301 		     poly_int64_pod *poffset)
302 {
303   poly_int64 this_frame_offset;
304   int frame_off, frame_alignment, frame_phase;
305 
306   /* Calculate how many bytes the start of local variables is off from
307      stack alignment.  */
308   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
309   frame_off = targetm.starting_frame_offset () % frame_alignment;
310   frame_phase = frame_off ? frame_alignment - frame_off : 0;
311 
312   /* Round the frame offset to the specified alignment.  */
313 
314   if (FRAME_GROWS_DOWNWARD)
315     this_frame_offset
316       = (aligned_lower_bound (start + length - size - frame_phase, alignment)
317 	 + frame_phase);
318   else
319     this_frame_offset
320       = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
321 
322   /* See if it fits.  If this space is at the edge of the frame,
323      consider extending the frame to make it fit.  Our caller relies on
324      this when allocating a new slot.  */
325   if (maybe_lt (this_frame_offset, start))
326     {
327       if (known_eq (frame_offset, start))
328 	frame_offset = this_frame_offset;
329       else
330 	return false;
331     }
332   else if (maybe_gt (this_frame_offset + size, start + length))
333     {
334       if (known_eq (frame_offset, start + length))
335 	frame_offset = this_frame_offset + size;
336       else
337 	return false;
338     }
339 
340   *poffset = this_frame_offset;
341   return true;
342 }
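/* As a concrete example, with FRAME_GROWS_DOWNWARD, a free area starting
   at offset -32 with length 16, and a request for an 8-byte slot with
   8-byte alignment, the slot lands at offset -24: the end of the area
   minus the slot size, rounded down to the alignment (frame_phase is zero
   on most targets, so it drops out).  */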
343 
344 /* Create a new frame_space structure describing free space in the stack
345    frame beginning at START and ending at END, and chain it into the
346    function's frame_space_list.  */
347 
348 static void
349 add_frame_space (poly_int64 start, poly_int64 end)
350 {
351   class frame_space *space = ggc_alloc<frame_space> ();
352   space->next = crtl->frame_space_list;
353   crtl->frame_space_list = space;
354   space->start = start;
355   space->length = end - start;
356 }
357 
358 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
359    with machine mode MODE.
360 
361    ALIGN controls the amount of alignment for the address of the slot:
362    0 means according to MODE,
363    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
364    -2 means use BITS_PER_UNIT,
365    positive specifies alignment boundary in bits.
366 
367    KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
368    alignment and ASLK_RECORD_PAD bit set if we should remember
369    extra space we allocated for alignment purposes.  When we are
370    called from assign_stack_temp_for_type, it is not set so we don't
371    track the same stack slot in two independent lists.
372 
373    We do not round to stack_boundary here.  */
374 
375 rtx
376 assign_stack_local_1 (machine_mode mode, poly_int64 size,
377 		      int align, int kind)
378 {
379   rtx x, addr;
380   poly_int64 bigend_correction = 0;
381   poly_int64 slot_offset = 0, old_frame_offset;
382   unsigned int alignment, alignment_in_bits;
383 
384   if (align == 0)
385     {
386       alignment = get_stack_local_alignment (NULL, mode);
387       alignment /= BITS_PER_UNIT;
388     }
389   else if (align == -1)
390     {
391       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
392       size = aligned_upper_bound (size, alignment);
393     }
394   else if (align == -2)
395     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
396   else
397     alignment = align / BITS_PER_UNIT;
398 
399   alignment_in_bits = alignment * BITS_PER_UNIT;
400 
401   /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
402   if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
403     {
404       alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
405       alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
406     }
407 
408   if (SUPPORTS_STACK_ALIGNMENT)
409     {
410       if (crtl->stack_alignment_estimated < alignment_in_bits)
411 	{
412           if (!crtl->stack_realign_processed)
413 	    crtl->stack_alignment_estimated = alignment_in_bits;
414           else
415 	    {
416 	      /* If stack is realigned and stack alignment value
417 		 hasn't been finalized, it is OK not to increase
418 		 stack_alignment_estimated.  The bigger alignment
419 		 requirement is recorded in stack_alignment_needed
420 		 below.  */
421 	      gcc_assert (!crtl->stack_realign_finalized);
422 	      if (!crtl->stack_realign_needed)
423 		{
424 		  /* It is OK to reduce the alignment as long as the
425 		     requested size is 0 or the estimated stack
426 		     alignment >= mode alignment.  */
427 		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
428 			      || known_eq (size, 0)
429 			      || (crtl->stack_alignment_estimated
430 				  >= GET_MODE_ALIGNMENT (mode)));
431 		  alignment_in_bits = crtl->stack_alignment_estimated;
432 		  alignment = alignment_in_bits / BITS_PER_UNIT;
433 		}
434 	    }
435 	}
436     }
437 
438   if (crtl->stack_alignment_needed < alignment_in_bits)
439     crtl->stack_alignment_needed = alignment_in_bits;
440   if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
441     crtl->max_used_stack_slot_alignment = alignment_in_bits;
442 
443   if (mode != BLKmode || maybe_ne (size, 0))
444     {
445       if (kind & ASLK_RECORD_PAD)
446 	{
447 	  class frame_space **psp;
448 
449 	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
450 	    {
451 	      class frame_space *space = *psp;
452 	      if (!try_fit_stack_local (space->start, space->length, size,
453 					alignment, &slot_offset))
454 		continue;
455 	      *psp = space->next;
456 	      if (known_gt (slot_offset, space->start))
457 		add_frame_space (space->start, slot_offset);
458 	      if (known_lt (slot_offset + size, space->start + space->length))
459 		add_frame_space (slot_offset + size,
460 				 space->start + space->length);
461 	      goto found_space;
462 	    }
463 	}
464     }
465   else if (!STACK_ALIGNMENT_NEEDED)
466     {
467       slot_offset = frame_offset;
468       goto found_space;
469     }
470 
471   old_frame_offset = frame_offset;
472 
473   if (FRAME_GROWS_DOWNWARD)
474     {
475       frame_offset -= size;
476       try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
477 
478       if (kind & ASLK_RECORD_PAD)
479 	{
480 	  if (known_gt (slot_offset, frame_offset))
481 	    add_frame_space (frame_offset, slot_offset);
482 	  if (known_lt (slot_offset + size, old_frame_offset))
483 	    add_frame_space (slot_offset + size, old_frame_offset);
484 	}
485     }
486   else
487     {
488       frame_offset += size;
489       try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
490 
491       if (kind & ASLK_RECORD_PAD)
492 	{
493 	  if (known_gt (slot_offset, old_frame_offset))
494 	    add_frame_space (old_frame_offset, slot_offset);
495 	  if (known_lt (slot_offset + size, frame_offset))
496 	    add_frame_space (slot_offset + size, frame_offset);
497 	}
498     }
499 
500  found_space:
501   /* On a big-endian machine, if we are allocating more space than we will use,
502      use the least significant bytes of those that are allocated.  */
503   if (mode != BLKmode)
504     {
505       /* The slot size can sometimes be smaller than the mode size;
506 	 e.g. the rs6000 port allocates slots with a vector mode
507 	 that have the size of only one element.  However, the slot
508 	 size must always be ordered wrt the mode size, in the
509 	 same way as for a subreg.  */
510       gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
511       if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
512 	bigend_correction = size - GET_MODE_SIZE (mode);
513     }
514 
515   /* If we have already instantiated virtual registers, return the actual
516      address relative to the frame pointer.  */
517   if (virtuals_instantiated)
518     addr = plus_constant (Pmode, frame_pointer_rtx,
519 			  trunc_int_for_mode
520 			  (slot_offset + bigend_correction
521 			   + targetm.starting_frame_offset (), Pmode));
522   else
523     addr = plus_constant (Pmode, virtual_stack_vars_rtx,
524 			  trunc_int_for_mode
525 			  (slot_offset + bigend_correction,
526 			   Pmode));
527 
528   x = gen_rtx_MEM (mode, addr);
529   set_mem_align (x, alignment_in_bits);
530   MEM_NOTRAP_P (x) = 1;
531 
532   vec_safe_push (stack_slot_list, x);
533 
534   if (frame_offset_overflow (frame_offset, current_function_decl))
535     frame_offset = 0;
536 
537   return x;
538 }
539 
540 /* Wrap up assign_stack_local_1 with last parameter as false.  */
541 
542 rtx
543 assign_stack_local (machine_mode mode, poly_int64 size, int align)
544 {
545   return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
546 }
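/* A rough usage sketch: expansion code that wants a word-sized scratch
   slot in the current frame, aligned naturally for its mode, might write

     rtx slot = assign_stack_local (word_mode, UNITS_PER_WORD, 0);
     emit_move_insn (slot, source_reg);

   where ALIGN == 0 requests the natural alignment of word_mode and
   source_reg stands for some pseudo of that mode.  */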
547 
548 /* In order to evaluate some expressions, such as function calls returning
549    structures in memory, we need to temporarily allocate stack locations.
550    We record each allocated temporary in the following structure.
551 
552    Associated with each temporary slot is a nesting level.  When we pop up
553    one level, all temporaries associated with the previous level are freed.
554    Normally, all temporaries are freed after the execution of the statement
555    in which they were created.  However, if we are inside a ({...}) grouping,
556    the result may be in a temporary and hence must be preserved.  If the
557    result could be in a temporary, we preserve it if we can determine which
558    one it is in.  If we cannot determine which temporary may contain the
559    result, all temporaries are preserved.  A temporary is preserved by
560    pretending it was allocated at the previous nesting level.  */
561 
562 class GTY(()) temp_slot {
563 public:
564   /* Points to next temporary slot.  */
565   class temp_slot *next;
566   /* Points to previous temporary slot.  */
567   class temp_slot *prev;
568   /* The rtx used to reference the slot.  */
569   rtx slot;
570   /* The size, in units, of the slot.  */
571   poly_int64 size;
572   /* The type of the object in the slot, or zero if it doesn't correspond
573      to a type.  We use this to determine whether a slot can be reused.
574      It can be reused if objects of the type of the new slot will always
575      conflict with objects of the type of the old slot.  */
576   tree type;
577   /* The alignment (in bits) of the slot.  */
578   unsigned int align;
579   /* Nonzero if this temporary is currently in use.  */
580   char in_use;
581   /* Nesting level at which this slot is being used.  */
582   int level;
583   /* The offset of the slot from the frame_pointer, including extra space
584      for alignment.  This info is for combine_temp_slots.  */
585   poly_int64 base_offset;
586   /* The size of the slot, including extra space for alignment.  This
587      info is for combine_temp_slots.  */
588   poly_int64 full_size;
589 };
590 
591 /* Entry for the below hash table.  */
592 struct GTY((for_user)) temp_slot_address_entry {
593   hashval_t hash;
594   rtx address;
595   class temp_slot *temp_slot;
596 };
597 
598 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
599 {
600   static hashval_t hash (temp_slot_address_entry *);
601   static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
602 };
603 
604 /* A table of addresses that represent a stack slot.  The table is a mapping
605    from address RTXen to a temp slot.  */
606 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
607 static size_t n_temp_slots_in_use;
608 
609 /* Removes temporary slot TEMP from LIST.  */
610 
611 static void
612 cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
613 {
614   if (temp->next)
615     temp->next->prev = temp->prev;
616   if (temp->prev)
617     temp->prev->next = temp->next;
618   else
619     *list = temp->next;
620 
621   temp->prev = temp->next = NULL;
622 }
623 
624 /* Inserts temporary slot TEMP to LIST.  */
625 
626 static void
627 insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
628 {
629   temp->next = *list;
630   if (*list)
631     (*list)->prev = temp;
632   temp->prev = NULL;
633   *list = temp;
634 }
635 
636 /* Returns the list of used temp slots at LEVEL.  */
637 
638 static class temp_slot **
639 temp_slots_at_level (int level)
640 {
641   if (level >= (int) vec_safe_length (used_temp_slots))
642     vec_safe_grow_cleared (used_temp_slots, level + 1, true);
643 
644   return &(*used_temp_slots)[level];
645 }
646 
647 /* Returns the maximal temporary slot level.  */
648 
649 static int
650 max_slot_level (void)
651 {
652   if (!used_temp_slots)
653     return -1;
654 
655   return used_temp_slots->length () - 1;
656 }
657 
658 /* Moves temporary slot TEMP to LEVEL.  */
659 
660 static void
661 move_slot_to_level (class temp_slot *temp, int level)
662 {
663   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
664   insert_slot_to_list (temp, temp_slots_at_level (level));
665   temp->level = level;
666 }
667 
668 /* Make temporary slot TEMP available.  */
669 
670 static void
671 make_slot_available (class temp_slot *temp)
672 {
673   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
674   insert_slot_to_list (temp, &avail_temp_slots);
675   temp->in_use = 0;
676   temp->level = -1;
677   n_temp_slots_in_use--;
678 }
679 
680 /* Compute the hash value for an address -> temp slot mapping.
681    The value is cached on the mapping entry.  */
682 static hashval_t
683 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
684 {
685   int do_not_record = 0;
686   return hash_rtx (t->address, GET_MODE (t->address),
687 		   &do_not_record, NULL, false);
688 }
689 
690 /* Return the hash value for an address -> temp slot mapping.  */
691 hashval_t
692 temp_address_hasher::hash (temp_slot_address_entry *t)
693 {
694   return t->hash;
695 }
696 
697 /* Compare two address -> temp slot mapping entries.  */
698 bool
699 temp_address_hasher::equal (temp_slot_address_entry *t1,
700 			    temp_slot_address_entry *t2)
701 {
702   return exp_equiv_p (t1->address, t2->address, 0, true);
703 }
704 
705 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
706 static void
707 insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
708 {
709   struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
710   t->address = copy_rtx (address);
711   t->temp_slot = temp_slot;
712   t->hash = temp_slot_address_compute_hash (t);
713   *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
714 }
715 
716 /* Remove an address -> temp slot mapping entry if the temp slot is
717    not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
718 int
719 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
720 {
721   const struct temp_slot_address_entry *t = *slot;
722   if (! t->temp_slot->in_use)
723     temp_slot_address_table->clear_slot (slot);
724   return 1;
725 }
726 
727 /* Remove all mappings of addresses to unused temp slots.  */
728 static void
729 remove_unused_temp_slot_addresses (void)
730 {
731   /* Use quicker clearing if there aren't any active temp slots.  */
732   if (n_temp_slots_in_use)
733     temp_slot_address_table->traverse
734       <void *, remove_unused_temp_slot_addresses_1> (NULL);
735   else
736     temp_slot_address_table->empty ();
737 }
738 
739 /* Find the temp slot corresponding to the object at address X.  */
740 
741 static class temp_slot *
742 find_temp_slot_from_address (rtx x)
743 {
744   class temp_slot *p;
745   struct temp_slot_address_entry tmp, *t;
746 
747   /* First try the easy way:
748      See if X exists in the address -> temp slot mapping.  */
749   tmp.address = x;
750   tmp.temp_slot = NULL;
751   tmp.hash = temp_slot_address_compute_hash (&tmp);
752   t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
753   if (t)
754     return t->temp_slot;
755 
756   /* If we have a sum involving a register, see if it points to a temp
757      slot.  */
758   if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
759       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
760     return p;
761   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
762 	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
763     return p;
764 
765   /* Last resort: Address is a virtual stack var address.  */
766   poly_int64 offset;
767   if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
768     {
769       int i;
770       for (i = max_slot_level (); i >= 0; i--)
771 	for (p = *temp_slots_at_level (i); p; p = p->next)
772 	  if (known_in_range_p (offset, p->base_offset, p->full_size))
773 	    return p;
774     }
775 
776   return NULL;
777 }
778 
779 /* Allocate a temporary stack slot and record it for possible later
780    reuse.
781 
782    MODE is the machine mode to be given to the returned rtx.
783 
784    SIZE is the size in units of the space required.  We do no rounding here
785    since assign_stack_local will do any required rounding.
786 
787    TYPE is the type that will be used for the stack slot.  */
788 
789 rtx
790 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
791 {
792   unsigned int align;
793   class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
794   rtx slot;
795 
796   gcc_assert (known_size_p (size));
797 
798   align = get_stack_local_alignment (type, mode);
799 
800   /* Try to find an available, already-allocated temporary of the proper
801      mode which meets the size and alignment requirements.  Choose the
802      smallest one with the closest alignment.
803 
804      If assign_stack_temp is called outside of the tree->rtl expansion,
805      we cannot reuse the stack slots (that may still refer to
806      VIRTUAL_STACK_VARS_REGNUM).  */
807   if (!virtuals_instantiated)
808     {
809       for (p = avail_temp_slots; p; p = p->next)
810 	{
811 	  if (p->align >= align
812 	      && known_ge (p->size, size)
813 	      && GET_MODE (p->slot) == mode
814 	      && objects_must_conflict_p (p->type, type)
815 	      && (best_p == 0
816 		  || (known_eq (best_p->size, p->size)
817 		      ? best_p->align > p->align
818 		      : known_ge (best_p->size, p->size))))
819 	    {
820 	      if (p->align == align && known_eq (p->size, size))
821 		{
822 		  selected = p;
823 		  cut_slot_from_list (selected, &avail_temp_slots);
824 		  best_p = 0;
825 		  break;
826 		}
827 	      best_p = p;
828 	    }
829 	}
830     }
831 
832   /* Make our best, if any, the one to use.  */
833   if (best_p)
834     {
835       selected = best_p;
836       cut_slot_from_list (selected, &avail_temp_slots);
837 
838       /* If there are enough aligned bytes left over, make them into a new
839 	 temp_slot so that the extra bytes don't get wasted.  Do this only
840 	 for BLKmode slots, so that we can be sure of the alignment.  */
841       if (GET_MODE (best_p->slot) == BLKmode)
842 	{
843 	  int alignment = best_p->align / BITS_PER_UNIT;
844 	  poly_int64 rounded_size = aligned_upper_bound (size, alignment);
845 
846 	  if (known_ge (best_p->size - rounded_size, alignment))
847 	    {
848 	      p = ggc_alloc<temp_slot> ();
849 	      p->in_use = 0;
850 	      p->size = best_p->size - rounded_size;
851 	      p->base_offset = best_p->base_offset + rounded_size;
852 	      p->full_size = best_p->full_size - rounded_size;
853 	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
854 	      p->align = best_p->align;
855 	      p->type = best_p->type;
856 	      insert_slot_to_list (p, &avail_temp_slots);
857 
858 	      vec_safe_push (stack_slot_list, p->slot);
859 
860 	      best_p->size = rounded_size;
861 	      best_p->full_size = rounded_size;
862 	    }
863 	}
864     }
865 
866   /* If we still didn't find one, make a new temporary.  */
867   if (selected == 0)
868     {
869       poly_int64 frame_offset_old = frame_offset;
870 
871       p = ggc_alloc<temp_slot> ();
872 
873       /* We are passing an explicit alignment request to assign_stack_local.
874 	 One side effect of that is assign_stack_local will not round SIZE
875 	 to ensure the frame offset remains suitably aligned.
876 
877 	 So for requests which depended on the rounding of SIZE, we go ahead
878 	 and round it now.  We also make sure ALIGNMENT is at least
879 	 BIGGEST_ALIGNMENT.  */
880       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
881       p->slot = assign_stack_local_1 (mode,
882 				      (mode == BLKmode
883 				       ? aligned_upper_bound (size,
884 							      (int) align
885 							      / BITS_PER_UNIT)
886 				       : size),
887 				      align, 0);
888 
889       p->align = align;
890 
891       /* The following slot size computation is necessary because we don't
892 	 know the actual size of the temporary slot until assign_stack_local
893 	 has performed all the frame alignment and size rounding for the
894 	 requested temporary.  Note that extra space added for alignment
895 	 can be either above or below this stack slot depending on which
896 	 way the frame grows.  We include the extra space if and only if it
897 	 is above this slot.  */
898       if (FRAME_GROWS_DOWNWARD)
899 	p->size = frame_offset_old - frame_offset;
900       else
901 	p->size = size;
902 
903       /* Now define the fields used by combine_temp_slots.  */
904       if (FRAME_GROWS_DOWNWARD)
905 	{
906 	  p->base_offset = frame_offset;
907 	  p->full_size = frame_offset_old - frame_offset;
908 	}
909       else
910 	{
911 	  p->base_offset = frame_offset_old;
912 	  p->full_size = frame_offset - frame_offset_old;
913 	}
914 
915       selected = p;
916     }
917 
918   p = selected;
919   p->in_use = 1;
920   p->type = type;
921   p->level = temp_slot_level;
922   n_temp_slots_in_use++;
923 
924   pp = temp_slots_at_level (p->level);
925   insert_slot_to_list (p, pp);
926   insert_temp_slot_address (XEXP (p->slot, 0), p);
927 
928   /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
929   slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
930   vec_safe_push (stack_slot_list, slot);
931 
932   /* If we know the alias set for the memory that will be used, use
933      it.  If there's no TYPE, then we don't know anything about the
934      alias set for the memory.  */
935   set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
936   set_mem_align (slot, align);
937 
938   /* If a type is specified, set the relevant flags.  */
939   if (type != 0)
940     MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
941   MEM_NOTRAP_P (slot) = 1;
942 
943   return slot;
944 }
945 
946 /* Allocate a temporary stack slot and record it for possible later
947    reuse.  First two arguments are same as in preceding function.  */
948 
949 rtx
950 assign_stack_temp (machine_mode mode, poly_int64 size)
951 {
952   return assign_stack_temp_for_type (mode, size, NULL_TREE);
953 }
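/* For illustration, a caller that needs a temporary for a BLKmode value
   of a known type might do something along these lines:

     rtx temp = assign_stack_temp_for_type (BLKmode,
					    int_size_in_bytes (type), type);

   assuming TYPE has a constant size; passing TYPE allows the slot to be
   reused later only for objects whose types are guaranteed to conflict
   with it.  */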
954 
955 /* Assign a temporary.
956    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
957    and that decl should be used in error messages.  In either case, we
958    allocate space of the given type.
959    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
960    it is 0 if a register is OK.
961    DONT_PROMOTE is 1 if we should not promote values in register
962    to wider modes.  */
963 
964 rtx
965 assign_temp (tree type_or_decl, int memory_required,
966 	     int dont_promote ATTRIBUTE_UNUSED)
967 {
968   tree type, decl;
969   machine_mode mode;
970 #ifdef PROMOTE_MODE
971   int unsignedp;
972 #endif
973 
974   if (DECL_P (type_or_decl))
975     decl = type_or_decl, type = TREE_TYPE (decl);
976   else
977     decl = NULL, type = type_or_decl;
978 
979   mode = TYPE_MODE (type);
980 #ifdef PROMOTE_MODE
981   unsignedp = TYPE_UNSIGNED (type);
982 #endif
983 
984   /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
985      end.  See also create_tmp_var for the gimplification-time check.  */
986   gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
987 
988   if (mode == BLKmode || memory_required)
989     {
990       poly_int64 size;
991       rtx tmp;
992 
993       /* Unfortunately, we don't yet know how to allocate variable-sized
994 	 temporaries.  However, sometimes we can find a fixed upper limit on
995 	 the size, so try that instead.  */
996       if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
997 	size = max_int_size_in_bytes (type);
998 
999       /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
1000 	 problems with allocating the stack space.  */
1001       if (known_eq (size, 0))
1002 	size = 1;
1003 
1004       /* The size of the temporary may be too large to fit into an integer.  */
1005       /* ??? Not sure this should happen except for user silliness, so limit
1006 	 this to things that aren't compiler-generated temporaries.  The
1007 	 rest of the time we'll die in assign_stack_temp_for_type.  */
1008       if (decl
1009 	  && !known_size_p (size)
1010 	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1011 	{
1012 	  error ("size of variable %q+D is too large", decl);
1013 	  size = 1;
1014 	}
1015 
1016       tmp = assign_stack_temp_for_type (mode, size, type);
1017       return tmp;
1018     }
1019 
1020 #ifdef PROMOTE_MODE
1021   if (! dont_promote)
1022     mode = promote_mode (type, mode, &unsignedp);
1023 #endif
1024 
1025   return gen_reg_rtx (mode);
1026 }
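/* As an illustrative sketch, expansion code that must materialize an
   aggregate value in addressable stack memory might obtain the
   destination with

     rtx target = assign_temp (TREE_TYPE (exp), 1, 1);

   where the second argument forces stack memory and the third suppresses
   mode promotion (EXP stands for the expression being expanded).  */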
1027 
1028 /* Combine temporary stack slots which are adjacent on the stack.
1029 
1030    This allows for better use of already allocated stack space.  This is only
1031    done for BLKmode slots because we can be sure that we won't have alignment
1032    problems in this case.  */
1033 
1034 static void
1035 combine_temp_slots (void)
1036 {
1037   class temp_slot *p, *q, *next, *next_q;
1038   int num_slots;
1039 
1040   /* We can't combine slots, because the information about which slot
1041      is in which alias set will be lost.  */
1042   if (flag_strict_aliasing)
1043     return;
1044 
1045   /* If there are a lot of temp slots, don't do anything unless
1046      high levels of optimization are enabled.  */
1047   if (! flag_expensive_optimizations)
1048     for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1049       if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1050 	return;
1051 
1052   for (p = avail_temp_slots; p; p = next)
1053     {
1054       int delete_p = 0;
1055 
1056       next = p->next;
1057 
1058       if (GET_MODE (p->slot) != BLKmode)
1059 	continue;
1060 
1061       for (q = p->next; q; q = next_q)
1062 	{
1063        	  int delete_q = 0;
1064 
1065 	  next_q = q->next;
1066 
1067 	  if (GET_MODE (q->slot) != BLKmode)
1068 	    continue;
1069 
1070 	  if (known_eq (p->base_offset + p->full_size, q->base_offset))
1071 	    {
1072 	      /* Q comes after P; combine Q into P.  */
1073 	      p->size += q->size;
1074 	      p->full_size += q->full_size;
1075 	      delete_q = 1;
1076 	    }
1077 	  else if (known_eq (q->base_offset + q->full_size, p->base_offset))
1078 	    {
1079 	      /* P comes after Q; combine P into Q.  */
1080 	      q->size += p->size;
1081 	      q->full_size += p->full_size;
1082 	      delete_p = 1;
1083 	      break;
1084 	    }
1085 	  if (delete_q)
1086 	    cut_slot_from_list (q, &avail_temp_slots);
1087 	}
1088 
1089       /* Either delete P or advance past it.  */
1090       if (delete_p)
1091 	cut_slot_from_list (p, &avail_temp_slots);
1092     }
1093 }
1094 
1095 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1096    slot that previously was known by OLD_RTX.  */
1097 
1098 void
1099 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1100 {
1101   class temp_slot *p;
1102 
1103   if (rtx_equal_p (old_rtx, new_rtx))
1104     return;
1105 
1106   p = find_temp_slot_from_address (old_rtx);
1107 
1108   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1109      NEW_RTX is a register, see if one operand of the PLUS is a
1110      temporary location.  If so, NEW_RTX points into it.  Otherwise,
1111      if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1112      in common between them; if so, try a recursive call on those
1113      values.  */
1114   if (p == 0)
1115     {
1116       if (GET_CODE (old_rtx) != PLUS)
1117 	return;
1118 
1119       if (REG_P (new_rtx))
1120 	{
1121 	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1122 	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1123 	  return;
1124 	}
1125       else if (GET_CODE (new_rtx) != PLUS)
1126 	return;
1127 
1128       if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1129 	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1130       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1131 	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1132       else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1133 	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1134       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1135 	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1136 
1137       return;
1138     }
1139 
1140   /* Otherwise add an alias for the temp's address.  */
1141   insert_temp_slot_address (new_rtx, p);
1142 }
1143 
1144 /* If X could be a reference to a temporary slot, mark that slot as
1145    belonging to the level one higher than the current level.  If X
1146    matched one of our slots, just mark that one.  Otherwise, we can't
1147    easily predict which it is, so upgrade all of them.
1148 
1149    This is called when an ({...}) construct occurs and a statement
1150    returns a value in memory.  */
1151 
1152 void
1153 preserve_temp_slots (rtx x)
1154 {
1155   class temp_slot *p = 0, *next;
1156 
1157   if (x == 0)
1158     return;
1159 
1160   /* If X is a register that is being used as a pointer, see if we have
1161      a temporary slot we know it points to.  */
1162   if (REG_P (x) && REG_POINTER (x))
1163     p = find_temp_slot_from_address (x);
1164 
1165   /* If X is not in memory or is at a constant address, it cannot be in
1166      a temporary slot.  */
1167   if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1168     return;
1169 
1170   /* First see if we can find a match.  */
1171   if (p == 0)
1172     p = find_temp_slot_from_address (XEXP (x, 0));
1173 
1174   if (p != 0)
1175     {
1176       if (p->level == temp_slot_level)
1177 	move_slot_to_level (p, temp_slot_level - 1);
1178       return;
1179     }
1180 
1181   /* Otherwise, preserve all non-kept slots at this level.  */
1182   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1183     {
1184       next = p->next;
1185       move_slot_to_level (p, temp_slot_level - 1);
1186     }
1187 }
1188 
1189 /* Free all temporaries used so far.  This is normally called at the
1190    end of generating code for a statement.  */
1191 
1192 void
1193 free_temp_slots (void)
1194 {
1195   class temp_slot *p, *next;
1196   bool some_available = false;
1197 
1198   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1199     {
1200       next = p->next;
1201       make_slot_available (p);
1202       some_available = true;
1203     }
1204 
1205   if (some_available)
1206     {
1207       remove_unused_temp_slot_addresses ();
1208       combine_temp_slots ();
1209     }
1210 }
1211 
1212 /* Push deeper into the nesting level for stack temporaries.  */
1213 
1214 void
1215 push_temp_slots (void)
1216 {
1217   temp_slot_level++;
1218 }
1219 
1220 /* Pop a temporary nesting level.  All slots in use in the current level
1221    are freed.  */
1222 
1223 void
1224 pop_temp_slots (void)
1225 {
1226   free_temp_slots ();
1227   temp_slot_level--;
1228 }
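/* A typical expansion-time pattern, sketched here only for illustration,
   wraps the code generated for one statement in its own temporary level:

     push_temp_slots ();
     ... expand the statement, possibly calling assign_temp ...
     preserve_temp_slots (result_rtx);
     pop_temp_slots ();

   Temporaries created inside the bracketed region are freed by
   pop_temp_slots unless preserve_temp_slots moved them up a level
   (result_rtx stands for whatever value must outlive the statement).  */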
1229 
1230 /* Initialize temporary slots.  */
1231 
1232 void
1233 init_temp_slots (void)
1234 {
1235   /* We have not allocated any temporaries yet.  */
1236   avail_temp_slots = 0;
1237   vec_alloc (used_temp_slots, 0);
1238   temp_slot_level = 0;
1239   n_temp_slots_in_use = 0;
1240 
1241   /* Set up the table to map addresses to temp slots.  */
1242   if (! temp_slot_address_table)
1243     temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1244   else
1245     temp_slot_address_table->empty ();
1246 }
1247 
1248 /* Functions and data structures to keep track of the values hard regs
1249    had at the start of the function.  */
1250 
1251 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1252    and has_hard_reg_initial_val.  */
1253 struct GTY(()) initial_value_pair {
1254   rtx hard_reg;
1255   rtx pseudo;
1256 };
1257 /* ???  This could be a VEC but there is currently no way to define an
1258    opaque VEC type.  This could be worked around by defining struct
1259    initial_value_pair in function.h.  */
1260 struct GTY(()) initial_value_struct {
1261   int num_entries;
1262   int max_entries;
1263   initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1264 };
1265 
1266 /* If a pseudo represents an initial hard reg (or expression), return
1267    it, else return NULL_RTX.  */
1268 
1269 rtx
1270 get_hard_reg_initial_reg (rtx reg)
1271 {
1272   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1273   int i;
1274 
1275   if (ivs == 0)
1276     return NULL_RTX;
1277 
1278   for (i = 0; i < ivs->num_entries; i++)
1279     if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1280       return ivs->entries[i].hard_reg;
1281 
1282   return NULL_RTX;
1283 }
1284 
1285 /* Make sure that there's a pseudo register of mode MODE that stores the
1286    initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
1287 
1288 rtx
1289 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1290 {
1291   struct initial_value_struct *ivs;
1292   rtx rv;
1293 
1294   rv = has_hard_reg_initial_val (mode, regno);
1295   if (rv)
1296     return rv;
1297 
1298   ivs = crtl->hard_reg_initial_vals;
1299   if (ivs == 0)
1300     {
1301       ivs = ggc_alloc<initial_value_struct> ();
1302       ivs->num_entries = 0;
1303       ivs->max_entries = 5;
1304       ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1305       crtl->hard_reg_initial_vals = ivs;
1306     }
1307 
1308   if (ivs->num_entries >= ivs->max_entries)
1309     {
1310       ivs->max_entries += 5;
1311       ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1312 				    ivs->max_entries);
1313     }
1314 
1315   ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1316   ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1317 
1318   return ivs->entries[ivs->num_entries++].pseudo;
1319 }
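/* For example, a target or middle-end consumer that needs the value the
   return-address register had on entry can ask for a pseudo holding that
   incoming value:

     rtx ra = get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);

   where RETURN_ADDR_REGNUM stands for whatever hard register number the
   target uses; emit_initial_value_sets later emits the copies from the
   hard registers into these pseudos at function entry.  */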
1320 
1321 /* See if get_hard_reg_initial_val has been used to create a pseudo
1322    for the initial value of hard register REGNO in mode MODE.  Return
1323    the associated pseudo if so, otherwise return NULL.  */
1324 
1325 rtx
1326 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1327 {
1328   struct initial_value_struct *ivs;
1329   int i;
1330 
1331   ivs = crtl->hard_reg_initial_vals;
1332   if (ivs != 0)
1333     for (i = 0; i < ivs->num_entries; i++)
1334       if (GET_MODE (ivs->entries[i].hard_reg) == mode
1335 	  && REGNO (ivs->entries[i].hard_reg) == regno)
1336 	return ivs->entries[i].pseudo;
1337 
1338   return NULL_RTX;
1339 }
1340 
1341 unsigned int
1342 emit_initial_value_sets (void)
1343 {
1344   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1345   int i;
1346   rtx_insn *seq;
1347 
1348   if (ivs == 0)
1349     return 0;
1350 
1351   start_sequence ();
1352   for (i = 0; i < ivs->num_entries; i++)
1353     emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1354   seq = get_insns ();
1355   end_sequence ();
1356 
1357   emit_insn_at_entry (seq);
1358   return 0;
1359 }
1360 
1361 /* Store the hard reg / pseudo reg of initial-values entry I in *HREG and
1362    *PREG and return TRUE, or return FALSE if I is not a valid entry.  */
1363 bool
1364 initial_value_entry (int i, rtx *hreg, rtx *preg)
1365 {
1366   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1367   if (!ivs || i >= ivs->num_entries)
1368     return false;
1369 
1370   *hreg = ivs->entries[i].hard_reg;
1371   *preg = ivs->entries[i].pseudo;
1372   return true;
1373 }
1374 
1375 /* These routines are responsible for converting virtual register references
1376    to the actual hard register references once RTL generation is complete.
1377 
1378    The following four variables are used for communication between the
1379    routines.  They contain the offsets of the virtual registers from their
1380    respective hard registers.  */
1381 
1382 static poly_int64 in_arg_offset;
1383 static poly_int64 var_offset;
1384 static poly_int64 dynamic_offset;
1385 static poly_int64 out_arg_offset;
1386 static poly_int64 cfa_offset;
1387 
1388 /* In most machines, the stack pointer register is equivalent to the bottom
1389    of the stack.  */
1390 
1391 #ifndef STACK_POINTER_OFFSET
1392 #define STACK_POINTER_OFFSET	0
1393 #endif
1394 
1395 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1396 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1397 #endif
1398 
1399 /* If not defined, pick an appropriate default for the offset of dynamically
1400    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1401    INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
1402 
1403 #ifndef STACK_DYNAMIC_OFFSET
1404 
1405 /* The bottom of the stack points to the actual arguments.  If
1406    REG_PARM_STACK_SPACE is defined, this includes the space for the register
1407    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1408    stack space for register parameters is not pushed by the caller, but
1409    rather part of the fixed stack areas and hence not included in
1410    `crtl->outgoing_args_size'.  Nevertheless, we must allow
1411    for it when allocating stack dynamic objects.  */
1412 
1413 #ifdef INCOMING_REG_PARM_STACK_SPACE
1414 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1415 ((ACCUMULATE_OUTGOING_ARGS						      \
1416   ? (crtl->outgoing_args_size				      \
1417      + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1418 					       : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1419   : 0) + (STACK_POINTER_OFFSET))
1420 #else
1421 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1422   ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
1423  + (STACK_POINTER_OFFSET))
1424 #endif
1425 #endif
1426 
1427 
1428 /* Given a piece of RTX and a pointer to a poly_int64, if the RTX
1429    is a virtual register, return the equivalent hard register and set the
1430    offset indirectly through the pointer.  Otherwise, return 0.  */
1431 
1432 static rtx
1433 instantiate_new_reg (rtx x, poly_int64_pod *poffset)
1434 {
1435   rtx new_rtx;
1436   poly_int64 offset;
1437 
1438   if (x == virtual_incoming_args_rtx)
1439     {
1440       if (stack_realign_drap)
1441         {
1442 	  /* Replace virtual_incoming_args_rtx with internal arg
1443 	     pointer if DRAP is used to realign stack.  */
1444           new_rtx = crtl->args.internal_arg_pointer;
1445           offset = 0;
1446         }
1447       else
1448         new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1449     }
1450   else if (x == virtual_stack_vars_rtx)
1451     new_rtx = frame_pointer_rtx, offset = var_offset;
1452   else if (x == virtual_stack_dynamic_rtx)
1453     new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1454   else if (x == virtual_outgoing_args_rtx)
1455     new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1456   else if (x == virtual_cfa_rtx)
1457     {
1458 #ifdef FRAME_POINTER_CFA_OFFSET
1459       new_rtx = frame_pointer_rtx;
1460 #else
1461       new_rtx = arg_pointer_rtx;
1462 #endif
1463       offset = cfa_offset;
1464     }
1465   else if (x == virtual_preferred_stack_boundary_rtx)
1466     {
1467       new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1468       offset = 0;
1469     }
1470   else
1471     return NULL_RTX;
1472 
1473   *poffset = offset;
1474   return new_rtx;
1475 }
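/* For instance, once the offsets above have been computed, an access such
   as (mem (plus (reg virtual-stack-vars) (const_int 8))) is rewritten as
   (mem (plus (reg frame-pointer) (const_int var_offset + 8))), with the
   constants folded into a single offset by plus_constant.  */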
1476 
1477 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1478    registers present inside of *LOC.  The expression is simplified,
1479    as much as possible, but is not to be considered "valid" in any sense
1480    implied by the target.  Return true if any change is made.  */
1481 
1482 static bool
1483 instantiate_virtual_regs_in_rtx (rtx *loc)
1484 {
1485   if (!*loc)
1486     return false;
1487   bool changed = false;
1488   subrtx_ptr_iterator::array_type array;
1489   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1490     {
1491       rtx *loc = *iter;
1492       if (rtx x = *loc)
1493 	{
1494 	  rtx new_rtx;
1495 	  poly_int64 offset;
1496 	  switch (GET_CODE (x))
1497 	    {
1498 	    case REG:
1499 	      new_rtx = instantiate_new_reg (x, &offset);
1500 	      if (new_rtx)
1501 		{
1502 		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1503 		  changed = true;
1504 		}
1505 	      iter.skip_subrtxes ();
1506 	      break;
1507 
1508 	    case PLUS:
1509 	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1510 	      if (new_rtx)
1511 		{
1512 		  XEXP (x, 0) = new_rtx;
1513 		  *loc = plus_constant (GET_MODE (x), x, offset, true);
1514 		  changed = true;
1515 		  iter.skip_subrtxes ();
1516 		  break;
1517 		}
1518 
1519 	      /* FIXME -- from old code */
1520 	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1521 		 we can commute the PLUS and SUBREG because pointers into the
1522 		 frame are well-behaved.  */
1523 	      break;
1524 
1525 	    default:
1526 	      break;
1527 	    }
1528 	}
1529     }
1530   return changed;
1531 }
1532 
1533 /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
1534    matches the predicate for insn CODE operand OPERAND.  */
1535 
1536 static int
1537 safe_insn_predicate (int code, int operand, rtx x)
1538 {
1539   return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1540 }
1541 
1542 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1543    registers present inside of insn.  The result will be a valid insn.  */
1544 
1545 static void
1546 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1547 {
1548   poly_int64 offset;
1549   int insn_code, i;
1550   bool any_change = false;
1551   rtx set, new_rtx, x;
1552   rtx_insn *seq;
1553 
1554   /* There are some special cases to be handled first.  */
1555   set = single_set (insn);
1556   if (set)
1557     {
1558       /* We're allowed to assign to a virtual register.  This is interpreted
1559 	 to mean that the underlying register gets assigned the inverse
1560 	 transformation.  This is used, for example, in the handling of
1561 	 non-local gotos.  */
1562       new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1563       if (new_rtx)
1564 	{
1565 	  start_sequence ();
1566 
1567 	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1568 	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1569 				   gen_int_mode (-offset, GET_MODE (new_rtx)));
1570 	  x = force_operand (x, new_rtx);
1571 	  if (x != new_rtx)
1572 	    emit_move_insn (new_rtx, x);
1573 
1574 	  seq = get_insns ();
1575 	  end_sequence ();
1576 
1577 	  emit_insn_before (seq, insn);
1578 	  delete_insn (insn);
1579 	  return;
1580 	}
1581 
1582       /* Handle a straight copy from a virtual register by generating a
1583 	 new add insn.  The difference between this and falling through
1584 	 to the generic case is avoiding a new pseudo and eliminating a
1585 	 move insn in the initial rtl stream.  */
1586       new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1587       if (new_rtx
1588 	  && maybe_ne (offset, 0)
1589 	  && REG_P (SET_DEST (set))
1590 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1591 	{
1592 	  start_sequence ();
1593 
1594 	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1595 				   gen_int_mode (offset,
1596 						 GET_MODE (SET_DEST (set))),
1597 				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1598 	  if (x != SET_DEST (set))
1599 	    emit_move_insn (SET_DEST (set), x);
1600 
1601 	  seq = get_insns ();
1602 	  end_sequence ();
1603 
1604 	  emit_insn_before (seq, insn);
1605 	  delete_insn (insn);
1606 	  return;
1607 	}
1608 
1609       extract_insn (insn);
1610       insn_code = INSN_CODE (insn);
1611 
1612       /* Handle a plus involving a virtual register by determining if the
1613 	 operands remain valid if they're modified in place.  */
1614       poly_int64 delta;
1615       if (GET_CODE (SET_SRC (set)) == PLUS
1616 	  && recog_data.n_operands >= 3
1617 	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1618 	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1619 	  && poly_int_rtx_p (recog_data.operand[2], &delta)
1620 	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1621 	{
1622 	  offset += delta;
1623 
1624 	  /* If the sum is zero, then replace with a plain move.  */
1625 	  if (known_eq (offset, 0)
1626 	      && REG_P (SET_DEST (set))
1627 	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1628 	    {
1629 	      start_sequence ();
1630 	      emit_move_insn (SET_DEST (set), new_rtx);
1631 	      seq = get_insns ();
1632 	      end_sequence ();
1633 
1634 	      emit_insn_before (seq, insn);
1635 	      delete_insn (insn);
1636 	      return;
1637 	    }
1638 
1639 	  x = gen_int_mode (offset, recog_data.operand_mode[2]);
1640 
1641 	  /* Using validate_change and apply_change_group here leaves
1642 	     recog_data in an invalid state.  Since we know exactly what
1643 	     we want to check, do those two by hand.  */
1644 	  if (safe_insn_predicate (insn_code, 1, new_rtx)
1645 	      && safe_insn_predicate (insn_code, 2, x))
1646 	    {
1647 	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1648 	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1649 	      any_change = true;
1650 
1651 	      /* Fall through into the regular operand fixup loop in
1652 		 order to take care of operands other than 1 and 2.  */
1653 	    }
1654 	}
1655     }
1656   else
1657     {
1658       extract_insn (insn);
1659       insn_code = INSN_CODE (insn);
1660     }
1661 
1662   /* In the general case, we expect virtual registers to appear only in
1663      operands, and then only as either bare registers or inside memories.  */
1664   for (i = 0; i < recog_data.n_operands; ++i)
1665     {
1666       x = recog_data.operand[i];
1667       switch (GET_CODE (x))
1668 	{
1669 	case MEM:
1670 	  {
1671 	    rtx addr = XEXP (x, 0);
1672 
1673 	    if (!instantiate_virtual_regs_in_rtx (&addr))
1674 	      continue;
1675 
1676 	    start_sequence ();
1677 	    x = replace_equiv_address (x, addr, true);
1678 	    /* It may happen that the address with the virtual reg
1679 	       was valid (e.g. based on the virtual stack reg, which might
1680 	       be acceptable to the predicates with all offsets), whereas
1681 	       the new address no longer is, for instance because it still
1682 	       carries an offset but the base reg is no longer the virtual
1683 	       stack reg.  Below we would do a force_reg on the whole
1684 	       operand, but this insn might actually only accept memory.
1685 	       Hence, before resorting to that, try to reload the address
1686 	       into a register, so this operand stays a MEM.  */
1687 	    if (!safe_insn_predicate (insn_code, i, x))
1688 	      {
1689 		addr = force_reg (GET_MODE (addr), addr);
1690 		x = replace_equiv_address (x, addr, true);
1691 	      }
1692 	    seq = get_insns ();
1693 	    end_sequence ();
1694 	    if (seq)
1695 	      emit_insn_before (seq, insn);
1696 	  }
1697 	  break;
1698 
1699 	case REG:
1700 	  new_rtx = instantiate_new_reg (x, &offset);
1701 	  if (new_rtx == NULL)
1702 	    continue;
1703 	  if (known_eq (offset, 0))
1704 	    x = new_rtx;
1705 	  else
1706 	    {
1707 	      start_sequence ();
1708 
1709 	      /* Careful, special mode predicates may have stuff in
1710 		 insn_data[insn_code].operand[i].mode that isn't useful
1711 		 to us for computing a new value.  */
1712 	      /* ??? Recognize address_operand and/or "p" constraints
1713 		 to see if (plus new offset) is a valid address before we
1714 		 put this through expand_simple_binop.  */
1715 	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1716 				       gen_int_mode (offset, GET_MODE (x)),
1717 				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
1718 	      seq = get_insns ();
1719 	      end_sequence ();
1720 	      emit_insn_before (seq, insn);
1721 	    }
1722 	  break;
1723 
1724 	case SUBREG:
1725 	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1726 	  if (new_rtx == NULL)
1727 	    continue;
1728 	  if (maybe_ne (offset, 0))
1729 	    {
1730 	      start_sequence ();
1731 	      new_rtx = expand_simple_binop
1732 		(GET_MODE (new_rtx), PLUS, new_rtx,
1733 		 gen_int_mode (offset, GET_MODE (new_rtx)),
1734 		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1735 	      seq = get_insns ();
1736 	      end_sequence ();
1737 	      emit_insn_before (seq, insn);
1738 	    }
1739 	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1740 				   GET_MODE (new_rtx), SUBREG_BYTE (x));
1741 	  gcc_assert (x);
1742 	  break;
1743 
1744 	default:
1745 	  continue;
1746 	}
1747 
1748       /* At this point, X contains the new value for the operand.
1749 	 Validate the new value vs the insn predicate.  Note that
1750 	 asm insns will have insn_code -1 here.  */
1751       if (!safe_insn_predicate (insn_code, i, x))
1752 	{
1753 	  start_sequence ();
1754 	  if (REG_P (x))
1755 	    {
1756 	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1757 	      x = copy_to_reg (x);
1758 	    }
1759 	  else
1760 	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
1761 	  seq = get_insns ();
1762 	  end_sequence ();
1763 	  if (seq)
1764 	    emit_insn_before (seq, insn);
1765 	}
1766 
1767       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1768       any_change = true;
1769     }
1770 
1771   if (any_change)
1772     {
1773       /* Propagate operand changes into the duplicates.  */
1774       for (i = 0; i < recog_data.n_dups; ++i)
1775 	*recog_data.dup_loc[i]
1776 	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1777 
1778       /* Force re-recognition of the instruction for validation.  */
1779       INSN_CODE (insn) = -1;
1780     }
1781 
1782   if (asm_noperands (PATTERN (insn)) >= 0)
1783     {
1784       if (!check_asm_operands (PATTERN (insn)))
1785 	{
1786 	  error_for_asm (insn, "impossible constraint in %<asm%>");
1787 	  /* For asm goto, instead of fixing up all the edges
1788 	     just clear the template and clear input and output operands
1789 	     and strip away clobbers.  */
1790 	  if (JUMP_P (insn))
1791 	    {
1792 	      rtx asm_op = extract_asm_operands (PATTERN (insn));
1793 	      PATTERN (insn) = asm_op;
1794 	      PUT_MODE (asm_op, VOIDmode);
1795 	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1796 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (asm_op) = "";
1797 	      ASM_OPERANDS_OUTPUT_IDX (asm_op) = 0;
1798 	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1799 	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1800 	    }
1801 	  else
1802 	    delete_insn (insn);
1803 	}
1804     }
1805   else
1806     {
1807       if (recog_memoized (insn) < 0)
1808 	fatal_insn_not_found (insn);
1809     }
1810 }
1811 
1812 /* Subroutine of instantiate_decls.  Given RTL representing a decl,
1813    do any instantiation required.  */
1814 
1815 void
1816 instantiate_decl_rtl (rtx x)
1817 {
1818   rtx addr;
1819 
1820   if (x == 0)
1821     return;
1822 
1823   /* If this is a CONCAT, recurse for the pieces.  */
1824   if (GET_CODE (x) == CONCAT)
1825     {
1826       instantiate_decl_rtl (XEXP (x, 0));
1827       instantiate_decl_rtl (XEXP (x, 1));
1828       return;
1829     }
1830 
1831   /* If this is not a MEM, no need to do anything.  Similarly if the
1832      address is a constant or a register that is not a virtual register.  */
1833   if (!MEM_P (x))
1834     return;
1835 
1836   addr = XEXP (x, 0);
1837   if (CONSTANT_P (addr)
1838       || (REG_P (addr)
1839 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1840 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1841     return;
1842 
1843   instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1844 }
1845 
1846 /* Helper for instantiate_decls called via walk_tree: Process all decls
1847    in the given DECL_VALUE_EXPR.  */
1848 
1849 static tree
1850 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1851 {
1852   tree t = *tp;
1853   if (! EXPR_P (t))
1854     {
1855       *walk_subtrees = 0;
1856       if (DECL_P (t))
1857 	{
1858 	  if (DECL_RTL_SET_P (t))
1859 	    instantiate_decl_rtl (DECL_RTL (t));
1860 	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1861 	      && DECL_INCOMING_RTL (t))
1862 	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1863 	  if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1864 	      && DECL_HAS_VALUE_EXPR_P (t))
1865 	    {
1866 	      tree v = DECL_VALUE_EXPR (t);
1867 	      walk_tree (&v, instantiate_expr, NULL, NULL);
1868 	    }
1869 	}
1870     }
1871   return NULL;
1872 }
1873 
1874 /* Subroutine of instantiate_decls: Process all decls in the given
1875    BLOCK node and all its subblocks.  */
1876 
1877 static void
1878 instantiate_decls_1 (tree let)
1879 {
1880   tree t;
1881 
1882   for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1883     {
1884       if (DECL_RTL_SET_P (t))
1885 	instantiate_decl_rtl (DECL_RTL (t));
1886       if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1887 	{
1888 	  tree v = DECL_VALUE_EXPR (t);
1889 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1890 	}
1891     }
1892 
1893   /* Process all subblocks.  */
1894   for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1895     instantiate_decls_1 (t);
1896 }
1897 
1898 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1899    all virtual registers in their DECL_RTL's.  */
1900 
1901 static void
1902 instantiate_decls (tree fndecl)
1903 {
1904   tree decl;
1905   unsigned ix;
1906 
1907   /* Process all parameters of the function.  */
1908   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1909     {
1910       instantiate_decl_rtl (DECL_RTL (decl));
1911       instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1912       if (DECL_HAS_VALUE_EXPR_P (decl))
1913 	{
1914 	  tree v = DECL_VALUE_EXPR (decl);
1915 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1916 	}
1917     }
1918 
1919   if ((decl = DECL_RESULT (fndecl))
1920       && TREE_CODE (decl) == RESULT_DECL)
1921     {
1922       if (DECL_RTL_SET_P (decl))
1923 	instantiate_decl_rtl (DECL_RTL (decl));
1924       if (DECL_HAS_VALUE_EXPR_P (decl))
1925 	{
1926 	  tree v = DECL_VALUE_EXPR (decl);
1927 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1928 	}
1929     }
1930 
1931   /* Process the saved static chain if it exists.  */
1932   decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1933   if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1934     instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1935 
1936   /* Now process all variables defined in the function or its subblocks.  */
1937   if (DECL_INITIAL (fndecl))
1938     instantiate_decls_1 (DECL_INITIAL (fndecl));
1939 
1940   FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1941     if (DECL_RTL_SET_P (decl))
1942       instantiate_decl_rtl (DECL_RTL (decl));
1943   vec_free (cfun->local_decls);
1944 }
1945 
1946 /* Pass through the insns of the current function and convert virtual
1947    register references to hard register references.  */
1948 
1949 static unsigned int
1950 instantiate_virtual_regs (void)
1951 {
1952   rtx_insn *insn;
1953 
1954   /* Compute the offsets to use for this function.  */
1955   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1956   var_offset = targetm.starting_frame_offset ();
1957   dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1958   out_arg_offset = STACK_POINTER_OFFSET;
1959 #ifdef FRAME_POINTER_CFA_OFFSET
1960   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1961 #else
1962   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1963 #endif
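
  /* Editorial note: instantiate_new_reg (earlier in this file) pairs these
     offsets with the virtual registers roughly as follows -- incoming
     arguments are rebased on the arg pointer plus IN_ARG_OFFSET, local
     variables on the frame pointer plus VAR_OFFSET, the dynamic area and
     outgoing arguments on the stack pointer plus DYNAMIC_OFFSET and
     OUT_ARG_OFFSET respectively, and the virtual CFA uses CFA_OFFSET.  */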
1964 
1965   /* Initialize recognition, indicating that volatile is OK.  */
1966   init_recog ();
1967 
1968   /* Scan through all the insns, instantiating every virtual register still
1969      present.  */
1970   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1971     if (INSN_P (insn))
1972       {
1973 	/* These patterns in the instruction stream can never be recognized.
1974 	   Fortunately, they shouldn't contain virtual registers either.  */
1975         if (GET_CODE (PATTERN (insn)) == USE
1976 	    || GET_CODE (PATTERN (insn)) == CLOBBER
1977 	    || GET_CODE (PATTERN (insn)) == ASM_INPUT
1978 	    || DEBUG_MARKER_INSN_P (insn))
1979 	  continue;
1980 	else if (DEBUG_BIND_INSN_P (insn))
1981 	  instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1982 	else
1983 	  instantiate_virtual_regs_in_insn (insn);
1984 
1985 	if (insn->deleted ())
1986 	  continue;
1987 
1988 	instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1989 
1990 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
1991 	if (CALL_P (insn))
1992 	  instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1993       }
1994 
1995   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
1996   instantiate_decls (current_function_decl);
1997 
1998   targetm.instantiate_decls ();
1999 
2000   /* Indicate that, from now on, assign_stack_local should use
2001      frame_pointer_rtx.  */
2002   virtuals_instantiated = 1;
2003 
2004   return 0;
2005 }
2006 
2007 namespace {
2008 
2009 const pass_data pass_data_instantiate_virtual_regs =
2010 {
2011   RTL_PASS, /* type */
2012   "vregs", /* name */
2013   OPTGROUP_NONE, /* optinfo_flags */
2014   TV_NONE, /* tv_id */
2015   0, /* properties_required */
2016   0, /* properties_provided */
2017   0, /* properties_destroyed */
2018   0, /* todo_flags_start */
2019   0, /* todo_flags_finish */
2020 };
2021 
2022 class pass_instantiate_virtual_regs : public rtl_opt_pass
2023 {
2024 public:
2025   pass_instantiate_virtual_regs (gcc::context *ctxt)
2026     : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2027   {}
2028 
2029   /* opt_pass methods: */
2030   virtual unsigned int execute (function *)
2031     {
2032       return instantiate_virtual_regs ();
2033     }
2034 
2035 }; // class pass_instantiate_virtual_regs
2036 
2037 } // anon namespace
2038 
2039 rtl_opt_pass *
2040 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2041 {
2042   return new pass_instantiate_virtual_regs (ctxt);
2043 }
2044 
2045 
2046 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2047    This means a type for which function calls must pass an address to the
2048    function or get an address back from the function.
2049    EXP may be a type node or an expression (whose type is tested).  */
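
/* For example, on most targets a function returning
   `struct { char buf[64]; }' yields nonzero here (the caller passes the
   address of a return slot), while one returning `int' yields zero; the
   precise cutoff is left to the checks below, ultimately the target's
   return_in_memory hook and the set of usable return registers.
   (Editorial illustration, not an ABI guarantee.)  */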
2050 
2051 int
2052 aggregate_value_p (const_tree exp, const_tree fntype)
2053 {
2054   const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2055   int i, regno, nregs;
2056   rtx reg;
2057 
2058   if (fntype)
2059     switch (TREE_CODE (fntype))
2060       {
2061       case CALL_EXPR:
2062 	{
2063 	  tree fndecl = get_callee_fndecl (fntype);
2064 	  if (fndecl)
2065 	    fntype = TREE_TYPE (fndecl);
2066 	  else if (CALL_EXPR_FN (fntype))
2067 	    fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2068 	  else
2069 	    /* For internal functions, assume nothing needs to be
2070 	       returned in memory.  */
2071 	    return 0;
2072 	}
2073 	break;
2074       case FUNCTION_DECL:
2075 	fntype = TREE_TYPE (fntype);
2076 	break;
2077       case FUNCTION_TYPE:
2078       case METHOD_TYPE:
2079         break;
2080       case IDENTIFIER_NODE:
2081 	fntype = NULL_TREE;
2082 	break;
2083       default:
2084 	/* We don't expect other tree types here.  */
2085 	gcc_unreachable ();
2086       }
2087 
2088   if (VOID_TYPE_P (type))
2089     return 0;
2090 
2091   /* If a record should be passed the same as its first (and only) member
2092      don't pass it as an aggregate.  */
2093   if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2094     return aggregate_value_p (first_field (type), fntype);
2095 
2096   /* If the front end has decided that this needs to be passed by
2097      reference, do so.  */
2098   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2099       && DECL_BY_REFERENCE (exp))
2100     return 1;
2101 
2102   /* Function types that are TREE_ADDRESSABLE force return in memory.  */
2103   if (fntype && TREE_ADDRESSABLE (fntype))
2104     return 1;
2105 
2106   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2107      and thus can't be returned in registers.  */
2108   if (TREE_ADDRESSABLE (type))
2109     return 1;
2110 
2111   if (TYPE_EMPTY_P (type))
2112     return 0;
2113 
2114   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2115     return 1;
2116 
2117   if (targetm.calls.return_in_memory (type, fntype))
2118     return 1;
2119 
2120   /* Make sure we have suitable call-clobbered regs to return
2121      the value in; if not, we must return it in memory.  */
2122   reg = hard_function_value (type, 0, fntype, 0);
2123 
2124   /* If we have something other than a REG (e.g. a PARALLEL), then assume
2125      it is OK.  */
2126   if (!REG_P (reg))
2127     return 0;
2128 
2129   /* Use the default ABI if the type of the function isn't known.
2130      The scheme for handling interoperability between different ABIs
2131      requires us to be able to tell when we're calling a function with
2132      a nondefault ABI.  */
2133   const predefined_function_abi &abi = (fntype
2134 					? fntype_abi (fntype)
2135 					: default_function_abi);
2136   regno = REGNO (reg);
2137   nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2138   for (i = 0; i < nregs; i++)
2139     if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
2140       return 1;
2141 
2142   return 0;
2143 }
2144 
2145 /* Return true if we should assign DECL a pseudo register; false if it
2146    should live on the local stack.  */
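
/* As a concrete illustration: at -O0 a user-declared `int i;' normally ends
   up on the stack (the DECL_IGNORED_P, optimize and DECL_REGISTER tests
   below all fail, so we return false), whereas an ignored compiler
   temporary of the same type is given a pseudo.  (Editorial note.)  */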
2147 
2148 bool
2149 use_register_for_decl (const_tree decl)
2150 {
2151   if (TREE_CODE (decl) == SSA_NAME)
2152     {
2153       /* We often try to use the SSA_NAME, instead of its underlying
2154 	 decl, to get type information and guide decisions, to avoid
2155 	 differences of behavior between anonymous and named
2156 	 variables, but in this one case we have to go for the actual
2157 	 variable if there is one.  The main reason is that, at least
2158 	 at -O0, we want to place user variables on the stack, but we
2159 	 don't mind using pseudos for anonymous or ignored temps.
2160 	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2161 	 should go in pseudos, whereas their corresponding variables
2162 	 might have to go on the stack.  So, disregarding the decl
2163 	 here would negatively impact debug info at -O0, enable
2164 	 coalescing between SSA_NAMEs that ought to get different
2165 	 stack/pseudo assignments, and get the incoming argument
2166 	 processing thoroughly confused by PARM_DECLs expected to live
2167 	 in stack slots but assigned to pseudos.  */
2168       if (!SSA_NAME_VAR (decl))
2169 	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2170 	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2171 
2172       decl = SSA_NAME_VAR (decl);
2173     }
2174 
2175   /* Honor volatile.  */
2176   if (TREE_SIDE_EFFECTS (decl))
2177     return false;
2178 
2179   /* Honor addressability.  */
2180   if (TREE_ADDRESSABLE (decl))
2181     return false;
2182 
2183   /* RESULT_DECLs are a bit special in that they're assigned without
2184      regard to use_register_for_decl, but we generally only store in
2185      them.  If we coalesce their SSA NAMEs, we'd better return a
2186      result that matches the assignment in expand_function_start.  */
2187   if (TREE_CODE (decl) == RESULT_DECL)
2188     {
2189       /* If it's not an aggregate, we're going to use a REG or a
2190 	 PARALLEL containing a REG.  */
2191       if (!aggregate_value_p (decl, current_function_decl))
2192 	return true;
2193 
2194       /* If expand_function_start determines the return value, we'll
2195 	 use MEM if it's not by reference.  */
2196       if (cfun->returns_pcc_struct
2197 	  || (targetm.calls.struct_value_rtx
2198 	      (TREE_TYPE (current_function_decl), 1)))
2199 	return DECL_BY_REFERENCE (decl);
2200 
2201       /* Otherwise, we're taking an extra all.function_result_decl
2202 	 argument.  It's set up in assign_parms_augmented_arg_list,
2203 	 under the (negated) conditions above, and then it's used to
2204 	 set up the RESULT_DECL rtl in assign_parms, after looping
2205 	 over all parameters.  Now, if the RESULT_DECL is not by
2206 	 reference, we'll use a MEM either way.  */
2207       if (!DECL_BY_REFERENCE (decl))
2208 	return false;
2209 
2210       /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2211 	 the function_result_decl's assignment.  Since it's a pointer,
2212 	 we can short-circuit a number of the tests below, and we must
2213 	 duplicate them because we don't have the function_result_decl
2214 	 to test.  */
2215       if (!targetm.calls.allocate_stack_slots_for_args ())
2216 	return true;
2217       /* We don't set DECL_IGNORED_P for the function_result_decl.  */
2218       if (optimize)
2219 	return true;
2220       if (cfun->tail_call_marked)
2221 	return true;
2222       /* We don't set DECL_REGISTER for the function_result_decl.  */
2223       return false;
2224     }
2225 
2226   /* Only register-like things go in registers.  */
2227   if (DECL_MODE (decl) == BLKmode)
2228     return false;
2229 
2230   /* If -ffloat-store specified, don't put explicit float variables
2231      into registers.  */
2232   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2233      propagates values across these stores, and it probably shouldn't.  */
2234   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2235     return false;
2236 
2237   if (!targetm.calls.allocate_stack_slots_for_args ())
2238     return true;
2239 
2240   /* If we're not interested in tracking debugging information for
2241      this decl, then we can certainly put it in a register.  */
2242   if (DECL_IGNORED_P (decl))
2243     return true;
2244 
2245   if (optimize)
2246     return true;
2247 
2248   /* Thunks force a tail call even at -O0 so we need to avoid creating a
2249      dangling reference in case the parameter is passed by reference.  */
2250   if (TREE_CODE (decl) == PARM_DECL && cfun->tail_call_marked)
2251     return true;
2252 
2253   if (!DECL_REGISTER (decl))
2254     return false;
2255 
2256   /* When not optimizing, disregard register keyword for types that
2257      could have methods, otherwise the methods won't be callable from
2258      the debugger.  */
2259   if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2260     return false;
2261 
2262   return true;
2263 }
2264 
2265 /* Structures to communicate between the subroutines of assign_parms.
2266    The first holds data persistent across all parameters, the second
2267    is cleared out for each parameter.  */
2268 
2269 struct assign_parm_data_all
2270 {
2271   /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2272      should become a job of the target or otherwise encapsulated.  */
2273   CUMULATIVE_ARGS args_so_far_v;
2274   cumulative_args_t args_so_far;
2275   struct args_size stack_args_size;
2276   tree function_result_decl;
2277   tree orig_fnargs;
2278   rtx_insn *first_conversion_insn;
2279   rtx_insn *last_conversion_insn;
2280   HOST_WIDE_INT pretend_args_size;
2281   HOST_WIDE_INT extra_pretend_bytes;
2282   int reg_parm_stack_space;
2283 };
2284 
2285 struct assign_parm_data_one
2286 {
2287   tree nominal_type;
2288   function_arg_info arg;
2289   rtx entry_parm;
2290   rtx stack_parm;
2291   machine_mode nominal_mode;
2292   machine_mode passed_mode;
2293   struct locate_and_pad_arg_data locate;
2294   int partial;
2295 };
2296 
2297 /* A subroutine of assign_parms.  Initialize ALL.  */
2298 
2299 static void
2300 assign_parms_initialize_all (struct assign_parm_data_all *all)
2301 {
2302   tree fntype ATTRIBUTE_UNUSED;
2303 
2304   memset (all, 0, sizeof (*all));
2305 
2306   fntype = TREE_TYPE (current_function_decl);
2307 
2308 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2309   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2310 #else
2311   INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2312 			current_function_decl, -1);
2313 #endif
2314   all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2315 
2316 #ifdef INCOMING_REG_PARM_STACK_SPACE
2317   all->reg_parm_stack_space
2318     = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2319 #endif
2320 }
2321 
2322 /* If ARGS contains entries with complex types, split each such entry
2323    into two entries of the component type.  The vector is updated in
2324    place.  */
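
/* For instance, a parameter declared `_Complex double z' on a target whose
   split_complex_arg hook accepts the type becomes two adjacent double
   PARM_DECLs: Z itself is rewritten to hold the real part, and a new
   synthetic, nameless decl follows it for the imaginary part.
   (Editorial illustration.)  */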
2325 
2326 static void
2327 split_complex_args (vec<tree> *args)
2328 {
2329   unsigned i;
2330   tree p;
2331 
2332   FOR_EACH_VEC_ELT (*args, i, p)
2333     {
2334       tree type = TREE_TYPE (p);
2335       if (TREE_CODE (type) == COMPLEX_TYPE
2336 	  && targetm.calls.split_complex_arg (type))
2337 	{
2338 	  tree decl;
2339 	  tree subtype = TREE_TYPE (type);
2340 	  bool addressable = TREE_ADDRESSABLE (p);
2341 
2342 	  /* Rewrite the PARM_DECL's type with its component.  */
2343 	  p = copy_node (p);
2344 	  TREE_TYPE (p) = subtype;
2345 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2346 	  SET_DECL_MODE (p, VOIDmode);
2347 	  DECL_SIZE (p) = NULL;
2348 	  DECL_SIZE_UNIT (p) = NULL;
2349 	  /* If this arg must go in memory, put it in a pseudo here.
2350 	     We can't allow it to go in memory as per normal parms,
2351 	     because the usual place might not have the imag part
2352 	     adjacent to the real part.  */
2353 	  DECL_ARTIFICIAL (p) = addressable;
2354 	  DECL_IGNORED_P (p) = addressable;
2355 	  TREE_ADDRESSABLE (p) = 0;
2356 	  layout_decl (p, 0);
2357 	  (*args)[i] = p;
2358 
2359 	  /* Build a second synthetic decl.  */
2360 	  decl = build_decl (EXPR_LOCATION (p),
2361 			     PARM_DECL, NULL_TREE, subtype);
2362 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2363 	  DECL_ARTIFICIAL (decl) = addressable;
2364 	  DECL_IGNORED_P (decl) = addressable;
2365 	  layout_decl (decl, 0);
2366 	  args->safe_insert (++i, decl);
2367 	}
2368     }
2369 }
2370 
2371 /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
2372    the hidden struct return argument, and (abi willing) complex args.
2373    Return the new parameter list.  */
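
/* For example, for `struct big f (int x)' on a target that passes the
   struct return address as a hidden first argument (struct_value_rtx
   returns null), the augmented list becomes (.result_ptr, x), where
   .result_ptr is the artificial PARM_DECL created below.
   (Editorial illustration; `struct big' is assumed to be returned in
   memory.)  */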
2374 
2375 static vec<tree>
2376 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2377 {
2378   tree fndecl = current_function_decl;
2379   tree fntype = TREE_TYPE (fndecl);
2380   vec<tree> fnargs = vNULL;
2381   tree arg;
2382 
2383   for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2384     fnargs.safe_push (arg);
2385 
2386   all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2387 
2388   /* If struct value address is treated as the first argument, make it so.  */
2389   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2390       && ! cfun->returns_pcc_struct
2391       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2392     {
2393       tree type = build_pointer_type (TREE_TYPE (fntype));
2394       tree decl;
2395 
2396       decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2397 			 PARM_DECL, get_identifier (".result_ptr"), type);
2398       DECL_ARG_TYPE (decl) = type;
2399       DECL_ARTIFICIAL (decl) = 1;
2400       DECL_NAMELESS (decl) = 1;
2401       TREE_CONSTANT (decl) = 1;
2402       /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
2403 	 changes, the end of the RESULT_DECL handling block in
2404 	 use_register_for_decl must be adjusted to match.  */
2405 
2406       DECL_CHAIN (decl) = all->orig_fnargs;
2407       all->orig_fnargs = decl;
2408       fnargs.safe_insert (0, decl);
2409 
2410       all->function_result_decl = decl;
2411     }
2412 
2413   /* If the target wants to split complex arguments into scalars, do so.  */
2414   if (targetm.calls.split_complex_arg)
2415     split_complex_args (&fnargs);
2416 
2417   return fnargs;
2418 }
2419 
2420 /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2421    data for the parameter.  Incorporate ABI specifics such as pass-by-
2422    reference and type promotion.  */
2423 
2424 static void
2425 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2426 			     struct assign_parm_data_one *data)
2427 {
2428   int unsignedp;
2429 
2430 #ifndef BROKEN_VALUE_INITIALIZATION
2431   *data = assign_parm_data_one ();
2432 #else
2433   /* Old versions of GCC used to miscompile the above by only initializing
2434      the members with explicit constructors and copying garbage
2435      to the other members.  */
2436   assign_parm_data_one zero_data = {};
2437   *data = zero_data;
2438 #endif
2439 
2440   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
2441   if (!cfun->stdarg)
2442     data->arg.named = 1;  /* No variadic parms.  */
2443   else if (DECL_CHAIN (parm))
2444     data->arg.named = 1;  /* Not the last non-variadic parm. */
2445   else if (targetm.calls.strict_argument_naming (all->args_so_far))
2446     data->arg.named = 1;  /* Only variadic ones are unnamed.  */
2447   else
2448     data->arg.named = 0;  /* Treat as variadic.  */
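
  /* E.g. in `int f (int n, ...)' the parameter N is the last non-variadic
     parm, so it counts as named only if the target's strict_argument_naming
     hook says so; in a non-stdarg function every parameter is named.
     (Editorial note.)  */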
2449 
2450   data->nominal_type = TREE_TYPE (parm);
2451   data->arg.type = DECL_ARG_TYPE (parm);
2452 
2453   /* Look out for errors propagating this far.  Also, if the parameter's
2454      type is void then its value doesn't matter.  */
2455   if (TREE_TYPE (parm) == error_mark_node
2456       /* This can happen after weird syntax errors
2457 	 or if an enum type is defined among the parms.  */
2458       || TREE_CODE (parm) != PARM_DECL
2459       || data->arg.type == NULL
2460       || VOID_TYPE_P (data->nominal_type))
2461     {
2462       data->nominal_type = data->arg.type = void_type_node;
2463       data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
2464       return;
2465     }
2466 
2467   /* Find mode of arg as it is passed, and mode of arg as it should be
2468      during execution of this function.  */
2469   data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
2470   data->nominal_mode = TYPE_MODE (data->nominal_type);
2471 
2472   /* If the parm is to be passed as a transparent union or record, use the
2473      type of the first field for the tests below.  We have already verified
2474      that the modes are the same.  */
2475   if (RECORD_OR_UNION_TYPE_P (data->arg.type)
2476       && TYPE_TRANSPARENT_AGGR (data->arg.type))
2477     data->arg.type = TREE_TYPE (first_field (data->arg.type));
2478 
2479   /* See if this arg was passed by invisible reference.  */
2480   if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
2481     {
2482       data->nominal_type = data->arg.type;
2483       data->passed_mode = data->nominal_mode = data->arg.mode;
2484     }
2485 
2486   /* Find mode as it is passed by the ABI.  */
2487   unsignedp = TYPE_UNSIGNED (data->arg.type);
2488   data->arg.mode
2489     = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
2490 			     TREE_TYPE (current_function_decl), 0);
2491 }
2492 
2493 /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2494 
2495 static void
2496 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2497 			    struct assign_parm_data_one *data, bool no_rtl)
2498 {
2499   int varargs_pretend_bytes = 0;
2500 
2501   function_arg_info last_named_arg = data->arg;
2502   last_named_arg.named = true;
2503   targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
2504 					&varargs_pretend_bytes, no_rtl);
2505 
2506   /* If the back-end has requested extra stack space, record how much is
2507      needed.  Do not change pretend_args_size otherwise since it may be
2508      nonzero from an earlier partial argument.  */
2509   if (varargs_pretend_bytes > 0)
2510     all->pretend_args_size = varargs_pretend_bytes;
2511 }
2512 
2513 /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2514    the incoming location of the current parameter.  */
2515 
2516 static void
2517 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2518 			    struct assign_parm_data_one *data)
2519 {
2520   HOST_WIDE_INT pretend_bytes = 0;
2521   rtx entry_parm;
2522   bool in_regs;
2523 
2524   if (data->arg.mode == VOIDmode)
2525     {
2526       data->entry_parm = data->stack_parm = const0_rtx;
2527       return;
2528     }
2529 
2530   targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2531 					    data->arg.type);
2532 
2533   entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2534 						    data->arg);
2535   if (entry_parm == 0)
2536     data->arg.mode = data->passed_mode;
2537 
2538   /* Determine parm's home in the stack, in case it arrives in the stack
2539      or we should pretend it did.  Compute the stack position and rtx where
2540      the argument arrives and its size.
2541 
2542      There is one complexity here:  If this was a parameter that would
2543      have been passed in registers, but wasn't only because it is
2544      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2545      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2546      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2547      as it was the previous time.  */
2548   in_regs = (entry_parm != 0);
2549 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2550   in_regs = true;
2551 #endif
2552   if (!in_regs && !data->arg.named)
2553     {
2554       if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2555 	{
2556 	  rtx tem;
2557 	  function_arg_info named_arg = data->arg;
2558 	  named_arg.named = true;
2559 	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
2560 						     named_arg);
2561 	  in_regs = tem != NULL;
2562 	}
2563     }
2564 
2565   /* If this parameter was passed both in registers and in the stack, use
2566      the copy on the stack.  */
2567   if (targetm.calls.must_pass_in_stack (data->arg))
2568     entry_parm = 0;
2569 
2570   if (entry_parm)
2571     {
2572       int partial;
2573 
2574       partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
2575       data->partial = partial;
2576 
2577       /* The caller might already have allocated stack space for the
2578 	 register parameters.  */
2579       if (partial != 0 && all->reg_parm_stack_space == 0)
2580 	{
2581 	  /* Part of this argument is passed in registers and part
2582 	     is passed on the stack.  Ask the prologue code to extend
2583 	     the stack part so that we can recreate the full value.
2584 
2585 	     PRETEND_BYTES is the size of the registers we need to store.
2586 	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2587 	     stack space that the prologue should allocate.
2588 
2589 	     Internally, gcc assumes that the argument pointer is aligned
2590 	     to STACK_BOUNDARY bits.  This is used both for alignment
2591 	     optimizations (see init_emit) and to locate arguments that are
2592 	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
2593 	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2594 	     a stack boundary.  */
2595 
2596 	  /* We assume at most one partial arg, and it must be the first
2597 	     argument on the stack.  */
2598 	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2599 
2600 	  pretend_bytes = partial;
2601 	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
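	  /* E.g. with PARTIAL == 12 and STACK_BYTES == 16 this rounds
	     PRETEND_ARGS_SIZE up to 16.  (Editorial example.)  */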
2602 
2603 	  /* We want to align relative to the actual stack pointer, so
2604 	     don't include this in the stack size until later.  */
2605 	  all->extra_pretend_bytes = all->pretend_args_size;
2606 	}
2607     }
2608 
2609   locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
2610 		       all->reg_parm_stack_space,
2611 		       entry_parm ? data->partial : 0, current_function_decl,
2612 		       &all->stack_args_size, &data->locate);
2613 
2614   /* Update parm_stack_boundary if this parameter is passed in the
2615      stack.  */
2616   if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2617     crtl->parm_stack_boundary = data->locate.boundary;
2618 
2619   /* Adjust offsets to include the pretend args.  */
2620   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2621   data->locate.slot_offset.constant += pretend_bytes;
2622   data->locate.offset.constant += pretend_bytes;
2623 
2624   data->entry_parm = entry_parm;
2625 }
2626 
2627 /* A subroutine of assign_parms.  If there is actually space on the stack
2628    for this parm, count it in stack_args_size and return true.  */
2629 
2630 static bool
2631 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2632 			   struct assign_parm_data_one *data)
2633 {
2634   /* Trivially true if we've no incoming register.  */
2635   if (data->entry_parm == NULL)
2636     ;
2637   /* Also true if we're partially in registers and partially not,
2638      since we've arranged to drop the entire argument on the stack.  */
2639   else if (data->partial != 0)
2640     ;
2641   /* Also true if the target says that it's passed in both registers
2642      and on the stack.  */
2643   else if (GET_CODE (data->entry_parm) == PARALLEL
2644 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2645     ;
2646   /* Also true if the target says that there's stack allocated for
2647      all register parameters.  */
2648   else if (all->reg_parm_stack_space > 0)
2649     ;
2650   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2651   else
2652     return false;
2653 
2654   all->stack_args_size.constant += data->locate.size.constant;
2655   if (data->locate.size.var)
2656     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2657 
2658   return true;
2659 }
2660 
2661 /* A subroutine of assign_parms.  Given that this parameter is allocated
2662    stack space by the ABI, find it.  */
2663 
2664 static void
2665 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2666 {
2667   rtx offset_rtx, stack_parm;
2668   unsigned int align, boundary;
2669 
2670   /* If we're passing this arg using a reg, make its stack home the
2671      aligned stack slot.  */
2672   if (data->entry_parm)
2673     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2674   else
2675     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2676 
2677   stack_parm = crtl->args.internal_arg_pointer;
2678   if (offset_rtx != const0_rtx)
2679     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2680   stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
2681 
2682   if (!data->arg.pass_by_reference)
2683     {
2684       set_mem_attributes (stack_parm, parm, 1);
2685       /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2686 	 while promoted mode's size is needed.  */
2687       if (data->arg.mode != BLKmode
2688 	  && data->arg.mode != DECL_MODE (parm))
2689 	{
2690 	  set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
2691 	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2692 	    {
2693 	      poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2694 							 data->arg.mode);
2695 	      if (maybe_ne (offset, 0))
2696 		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2697 	    }
2698 	}
2699     }
2700 
2701   boundary = data->locate.boundary;
2702   align = BITS_PER_UNIT;
2703 
2704   /* If we're padding upward, we know that the alignment of the slot
2705      is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2706      intentionally forcing upward padding.  Otherwise we have to come
2707      up with a guess at the alignment based on OFFSET_RTX.  */
2708   poly_int64 offset;
2709   if (data->locate.where_pad == PAD_NONE || data->entry_parm)
2710     align = boundary;
2711   else if (data->locate.where_pad == PAD_UPWARD)
2712     {
2713       align = boundary;
2714       /* If the argument offset is actually more aligned than the nominal
2715 	 stack slot boundary, take advantage of that excess alignment.
2716 	 Don't make any assumptions if STACK_POINTER_OFFSET is in use.  */
2717       if (poly_int_rtx_p (offset_rtx, &offset)
2718 	  && known_eq (STACK_POINTER_OFFSET, 0))
2719 	{
2720 	  unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2721 	  if (offset_align == 0 || offset_align > STACK_BOUNDARY)
2722 	    offset_align = STACK_BOUNDARY;
2723 	  align = MAX (align, offset_align);
2724 	}
2725     }
2726   else if (poly_int_rtx_p (offset_rtx, &offset))
2727     {
2728       align = least_bit_hwi (boundary);
2729       unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2730       if (offset_align != 0)
2731 	align = MIN (align, offset_align);
2732     }
2733   set_mem_align (stack_parm, align);
2734 
2735   if (data->entry_parm)
2736     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2737 
2738   data->stack_parm = stack_parm;
2739 }
2740 
2741 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_PARM such that it's
2742    always valid and contiguous.  */
2743 
2744 static void
2745 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2746 {
2747   rtx entry_parm = data->entry_parm;
2748   rtx stack_parm = data->stack_parm;
2749 
2750   /* If this parm was passed part in regs and part in memory, pretend it
2751      arrived entirely in memory by pushing the register-part onto the stack.
2752      In the special case of a DImode or DFmode that is split, we could put
2753      it together in a pseudoreg directly, but for now that's not worth
2754      bothering with.  */
2755   if (data->partial != 0)
2756     {
2757       /* Handle calls that pass values in multiple non-contiguous
2758 	 locations.  The Irix 6 ABI has examples of this.  */
2759       if (GET_CODE (entry_parm) == PARALLEL)
2760 	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2761 			  data->arg.type, int_size_in_bytes (data->arg.type));
2762       else
2763 	{
2764 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
2765 	  move_block_from_reg (REGNO (entry_parm),
2766 			       validize_mem (copy_rtx (stack_parm)),
2767 			       data->partial / UNITS_PER_WORD);
2768 	}
2769 
2770       entry_parm = stack_parm;
2771     }
2772 
2773   /* If we didn't decide this parm came in a register, by default it came
2774      on the stack.  */
2775   else if (entry_parm == NULL)
2776     entry_parm = stack_parm;
2777 
2778   /* When an argument is passed in multiple locations, we can't make use
2779      of this information, but we can save some copying if the whole argument
2780      is passed in a single register.  */
2781   else if (GET_CODE (entry_parm) == PARALLEL
2782 	   && data->nominal_mode != BLKmode
2783 	   && data->passed_mode != BLKmode)
2784     {
2785       size_t i, len = XVECLEN (entry_parm, 0);
2786 
2787       for (i = 0; i < len; i++)
2788 	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2789 	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2790 	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2791 		== data->passed_mode)
2792 	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2793 	  {
2794 	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2795 	    break;
2796 	  }
2797     }
2798 
2799   data->entry_parm = entry_parm;
2800 }
2801 
2802 /* A subroutine of assign_parms.  Reconstitute any values which were
2803    passed in multiple registers and would fit in a single register.  */
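
/* For instance, a DImode argument that arrives as a PARALLEL of two
   word-sized pieces is gathered here into a single DImode pseudo with
   emit_group_store, so later code can treat it as one register.
   (Editorial illustration.)  */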
2804 
2805 static void
2806 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2807 {
2808   rtx entry_parm = data->entry_parm;
2809 
2810   /* Convert the PARALLEL to a REG of the same mode as the parallel.
2811      This can be done with register operations rather than on the
2812      stack, even if we will store the reconstituted parameter on the
2813      stack later.  */
2814   if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2815     {
2816       rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2817       emit_group_store (parmreg, entry_parm, data->arg.type,
2818 			GET_MODE_SIZE (GET_MODE (entry_parm)));
2819       entry_parm = parmreg;
2820     }
2821 
2822   data->entry_parm = entry_parm;
2823 }
2824 
2825 /* A subroutine of assign_parms.  Adjust DATA->STACK_PARM such that it's
2826    always valid and properly aligned.  */
2827 
2828 static void
2829 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2830 {
2831   rtx stack_parm = data->stack_parm;
2832 
2833   /* If we can't trust the parm stack slot to be aligned enough for its
2834      ultimate type, don't use that slot after entry.  We'll make another
2835      stack slot, if we need one.  */
2836   if (stack_parm
2837       && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
2838 	   && ((optab_handler (movmisalign_optab, data->nominal_mode)
2839 		!= CODE_FOR_nothing)
2840 	       || targetm.slow_unaligned_access (data->nominal_mode,
2841 						 MEM_ALIGN (stack_parm))))
2842 	  || (data->nominal_type
2843 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2844 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2845     stack_parm = NULL;
2846 
2847   /* If parm was passed in memory, and we need to convert it on entry,
2848      don't store it back in that same slot.  */
2849   else if (data->entry_parm == stack_parm
2850 	   && data->nominal_mode != BLKmode
2851 	   && data->nominal_mode != data->passed_mode)
2852     stack_parm = NULL;
2853 
2854   /* If stack protection is in effect for this function, don't leave any
2855      pointers in their passed stack slots.  */
2856   else if (crtl->stack_protect_guard
2857 	   && (flag_stack_protect == SPCT_FLAG_ALL
2858 	       || data->arg.pass_by_reference
2859 	       || POINTER_TYPE_P (data->nominal_type)))
2860     stack_parm = NULL;
2861 
2862   data->stack_parm = stack_parm;
2863 }
2864 
2865 /* A subroutine of assign_parms.  Return true if the current parameter
2866    should be stored as a BLKmode in the current frame.  */
2867 
2868 static bool
2869 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2870 {
2871   if (data->nominal_mode == BLKmode)
2872     return true;
2873   if (GET_MODE (data->entry_parm) == BLKmode)
2874     return true;
2875 
2876 #ifdef BLOCK_REG_PADDING
2877   /* Only assign_parm_setup_block knows how to deal with register arguments
2878      that are padded at the least significant end.  */
2879   if (REG_P (data->entry_parm)
2880       && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
2881       && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
2882 	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2883     return true;
2884 #endif
2885 
2886   return false;
2887 }
2888 
2889 /* A subroutine of assign_parms.  Arrange for the parameter to be
2890    present and valid in DATA->STACK_PARM.  */
2891 
2892 static void
2893 assign_parm_setup_block (struct assign_parm_data_all *all,
2894 			 tree parm, struct assign_parm_data_one *data)
2895 {
2896   rtx entry_parm = data->entry_parm;
2897   rtx stack_parm = data->stack_parm;
2898   rtx target_reg = NULL_RTX;
2899   bool in_conversion_seq = false;
2900   HOST_WIDE_INT size;
2901   HOST_WIDE_INT size_stored;
2902 
2903   if (GET_CODE (entry_parm) == PARALLEL)
2904     entry_parm = emit_group_move_into_temps (entry_parm);
2905 
2906   /* If we want the parameter in a pseudo, don't use a stack slot.  */
2907   if (is_gimple_reg (parm) && use_register_for_decl (parm))
2908     {
2909       tree def = ssa_default_def (cfun, parm);
2910       gcc_assert (def);
2911       machine_mode mode = promote_ssa_mode (def, NULL);
2912       rtx reg = gen_reg_rtx (mode);
2913       if (GET_CODE (reg) != CONCAT)
2914 	stack_parm = reg;
2915       else
2916 	{
2917 	  target_reg = reg;
2918 	  /* Avoid allocating a stack slot, if there isn't one
2919 	     preallocated by the ABI.  It might seem like we should
2920 	     always prefer a pseudo, but converting between
2921 	     floating-point and integer modes goes through the stack
2922 	     on various machines, so it's better to use the reserved
2923 	     stack slot than to risk wasting it and allocating more
2924 	     for the conversion.  */
2925 	  if (stack_parm == NULL_RTX)
2926 	    {
2927 	      int save = generating_concat_p;
2928 	      generating_concat_p = 0;
2929 	      stack_parm = gen_reg_rtx (mode);
2930 	      generating_concat_p = save;
2931 	    }
2932 	}
2933       data->stack_parm = NULL;
2934     }
2935 
2936   size = int_size_in_bytes (data->arg.type);
2937   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2938   if (stack_parm == 0)
2939     {
2940       HOST_WIDE_INT parm_align
2941 	= (STRICT_ALIGNMENT
2942 	   ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
2943 
2944       SET_DECL_ALIGN (parm, parm_align);
2945       if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
2946 	{
2947 	  rtx allocsize = gen_int_mode (size_stored, Pmode);
2948 	  get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
2949 	  stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
2950 					   MAX_SUPPORTED_STACK_ALIGNMENT);
2951 	  rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
2952 					    DECL_ALIGN (parm));
2953 	  mark_reg_pointer (addr, DECL_ALIGN (parm));
2954 	  stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
2955 	  MEM_NOTRAP_P (stack_parm) = 1;
2956 	}
2957       else
2958 	stack_parm = assign_stack_local (BLKmode, size_stored,
2959 					 DECL_ALIGN (parm));
2960       if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2961 	PUT_MODE (stack_parm, GET_MODE (entry_parm));
2962       set_mem_attributes (stack_parm, parm, 1);
2963     }
2964 
2965   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
2966      calls that pass values in multiple non-contiguous locations.  */
2967   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2968     {
2969       rtx mem;
2970 
2971       /* Note that we will be storing an integral number of words.
2972 	 So we have to be careful to ensure that we allocate an
2973 	 integral number of words.  We do this above when we call
2974 	 assign_stack_local if space was not allocated in the argument
2975 	 list.  If it was, this will not work if PARM_BOUNDARY is not
2976 	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
2977 	 if it becomes a problem.  Exception is when BLKmode arrives
2978 	 with arguments not conforming to word_mode.  */
2979 
2980       if (data->stack_parm == 0)
2981 	;
2982       else if (GET_CODE (entry_parm) == PARALLEL)
2983 	;
2984       else
2985 	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2986 
2987       mem = validize_mem (copy_rtx (stack_parm));
2988 
2989       /* Handle values in multiple non-contiguous locations.  */
2990       if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2991 	emit_group_store (mem, entry_parm, data->arg.type, size);
2992       else if (GET_CODE (entry_parm) == PARALLEL)
2993 	{
2994 	  push_to_sequence2 (all->first_conversion_insn,
2995 			     all->last_conversion_insn);
2996 	  emit_group_store (mem, entry_parm, data->arg.type, size);
2997 	  all->first_conversion_insn = get_insns ();
2998 	  all->last_conversion_insn = get_last_insn ();
2999 	  end_sequence ();
3000 	  in_conversion_seq = true;
3001 	}
3002 
3003       else if (size == 0)
3004 	;
3005 
3006       /* If SIZE is that of a mode no bigger than a word, just use
3007 	 that mode's store operation.  */
3008       else if (size <= UNITS_PER_WORD)
3009 	{
3010 	  unsigned int bits = size * BITS_PER_UNIT;
3011 	  machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
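	  /* E.g. SIZE == 4 normally yields SImode here; if no integer mode of
	     exactly SIZE bytes exists (e.g. SIZE == 3), MODE stays BLKmode and
	     we fall through to the shift or whole-word copy cases below.
	     (Editorial note.)  */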
3012 
3013 	  if (mode != BLKmode
3014 #ifdef BLOCK_REG_PADDING
3015 	      && (size == UNITS_PER_WORD
3016 		  || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3017 		      != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
3018 #endif
3019 	      )
3020 	    {
3021 	      rtx reg;
3022 
3023 	      /* We are really truncating a word_mode value containing
3024 		 SIZE bytes into a value of mode MODE.  If such an
3025 		 operation requires no actual instructions, we can refer
3026 		 to the value directly in mode MODE, otherwise we must
3027 		 start with the register in word_mode and explicitly
3028 		 convert it.  */
3029 	      if (mode == word_mode
3030 		  || TRULY_NOOP_TRUNCATION_MODES_P (mode, word_mode))
3031 		reg = gen_rtx_REG (mode, REGNO (entry_parm));
3032 	      else
3033 		{
3034 		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3035 		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3036 		}
3037 	      emit_move_insn (change_address (mem, mode, 0), reg);
3038 	    }
3039 
3040 #ifdef BLOCK_REG_PADDING
3041 	  /* Storing the register in memory as a full word, as
3042 	     move_block_from_reg below would do, and then using the
3043 	     MEM in a smaller mode, has the effect of shifting right
3044 	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
3045 	     shifting must be explicit.  */
3046 	  else if (!MEM_P (mem))
3047 	    {
3048 	      rtx x;
3049 
3050 	      /* If the assert below fails, we should have taken the
3051 		 mode != BLKmode path above, unless we have downward
3052 		 padding of smaller-than-word arguments on a machine
3053 		 with little-endian bytes, which would likely require
3054 		 additional changes to work correctly.  */
3055 	      gcc_checking_assert (BYTES_BIG_ENDIAN
3056 				   && (BLOCK_REG_PADDING (mode,
3057 							  data->arg.type, 1)
3058 				       == PAD_UPWARD));
3059 
3060 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3061 
3062 	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3063 	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3064 				NULL_RTX, 1);
3065 	      x = force_reg (word_mode, x);
3066 	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3067 
3068 	      emit_move_insn (mem, x);
3069 	    }
3070 #endif
3071 
3072 	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3073 	     machine must be aligned to the left before storing
3074 	     to memory.  Note that the previous test doesn't
3075 	     handle all cases (e.g. SIZE == 3).  */
3076 	  else if (size != UNITS_PER_WORD
3077 #ifdef BLOCK_REG_PADDING
3078 		   && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3079 		       == PAD_DOWNWARD)
3080 #else
3081 		   && BYTES_BIG_ENDIAN
3082 #endif
3083 		   )
3084 	    {
3085 	      rtx tem, x;
3086 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3087 	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3088 
3089 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3090 	      tem = change_address (mem, word_mode, 0);
3091 	      emit_move_insn (tem, x);
3092 	    }
3093 	  else
3094 	    move_block_from_reg (REGNO (entry_parm), mem,
3095 				 size_stored / UNITS_PER_WORD);
3096 	}
3097       else if (!MEM_P (mem))
3098 	{
3099 	  gcc_checking_assert (size > UNITS_PER_WORD);
3100 #ifdef BLOCK_REG_PADDING
3101 	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3102 						  data->arg.type, 0)
3103 			       == PAD_UPWARD);
3104 #endif
3105 	  emit_move_insn (mem, entry_parm);
3106 	}
3107       else
3108 	move_block_from_reg (REGNO (entry_parm), mem,
3109 			     size_stored / UNITS_PER_WORD);
3110     }
3111   else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
3112     {
3113       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3114       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3115 		       BLOCK_OP_NORMAL);
3116       all->first_conversion_insn = get_insns ();
3117       all->last_conversion_insn = get_last_insn ();
3118       end_sequence ();
3119       in_conversion_seq = true;
3120     }
3121 
3122   if (target_reg)
3123     {
3124       if (!in_conversion_seq)
3125 	emit_move_insn (target_reg, stack_parm);
3126       else
3127 	{
3128 	  push_to_sequence2 (all->first_conversion_insn,
3129 			     all->last_conversion_insn);
3130 	  emit_move_insn (target_reg, stack_parm);
3131 	  all->first_conversion_insn = get_insns ();
3132 	  all->last_conversion_insn = get_last_insn ();
3133 	  end_sequence ();
3134 	}
3135       stack_parm = target_reg;
3136     }
3137 
3138   data->stack_parm = stack_parm;
3139   set_parm_rtl (parm, stack_parm);
3140 }
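
/* Illustrative sketch (editorial example, not part of the compiler):
   with hypothetical values UNITS_PER_WORD == 4 and a 3-byte argument
   (SIZE == 3), the shift amount used above is

       by = (UNITS_PER_WORD - SIZE) * BITS_PER_UNIT = (4 - 3) * 8 = 8.

   On a big-endian target, left-shifting the incoming word by 8 bits
   moves the three data bytes into the most significant byte positions,
   so storing the full word leaves them at byte offsets 0..2 of the
   stack slot.  When memory is bypassed instead, the explicit right
   shift by the same 8 bits reproduces the effect of storing the word
   and re-reading it in the narrower mode.  */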
3141 
3142 /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
3143    parameter.  Get it there.  Perform all ABI specified conversions.  */
3144 
3145 static void
3146 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3147 		       struct assign_parm_data_one *data)
3148 {
3149   rtx parmreg, validated_mem;
3150   rtx equiv_stack_parm;
3151   machine_mode promoted_nominal_mode;
3152   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3153   bool did_conversion = false;
3154   bool need_conversion, moved;
3155   enum insn_code icode;
3156   rtx rtl;
3157 
3158   /* Store the parm in a pseudoregister during the function, but we may
3159      need to do it in a wider mode.  Using 2 here makes the result
3160      consistent with promote_decl_mode and thus expand_expr_real_1.  */
3161   promoted_nominal_mode
3162     = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3163 			     TREE_TYPE (current_function_decl), 2);
3164 
3165   parmreg = gen_reg_rtx (promoted_nominal_mode);
3166   if (!DECL_ARTIFICIAL (parm))
3167     mark_user_reg (parmreg);
3168 
3169   /* If this was an item that we received a pointer to,
3170      set rtl appropriately.  */
3171   if (data->arg.pass_by_reference)
3172     {
3173       rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
3174       set_mem_attributes (rtl, parm, 1);
3175     }
3176   else
3177     rtl = parmreg;
3178 
3179   assign_parm_remove_parallels (data);
3180 
3181   /* Copy the value into the register, thus bridging between
3182      assign_parm_find_data_types and expand_expr_real_1.  */
3183 
3184   equiv_stack_parm = data->stack_parm;
3185   validated_mem = validize_mem (copy_rtx (data->entry_parm));
3186 
3187   need_conversion = (data->nominal_mode != data->passed_mode
3188 		     || promoted_nominal_mode != data->arg.mode);
3189   moved = false;
3190 
3191   if (need_conversion
3192       && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3193       && data->nominal_mode == data->passed_mode
3194       && data->nominal_mode == GET_MODE (data->entry_parm))
3195     {
3196       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3197 	 mode, by the caller.  We now have to convert it to
3198 	 NOMINAL_MODE, if different.  However, PARMREG may be in
3199 	 a different mode than NOMINAL_MODE if it is being stored
3200 	 promoted.
3201 
3202 	 If ENTRY_PARM is a hard register, it might be in a register
3203 	 not valid for operating in its mode (e.g., an odd-numbered
3204 	 register for a DFmode).  In that case, moves are the only
3205 	 thing valid, so we can't do a convert from there.  This
3206 	 occurs when the calling sequence allows such misaligned
3207 	 usages.
3208 
3209 	 In addition, the conversion may involve a call, which could
3210 	 clobber parameters which haven't been copied to pseudo
3211 	 registers yet.
3212 
3213 	 First, we try to emit an insn which performs the necessary
3214 	 conversion.  We verify that this insn does not clobber any
3215 	 hard registers.  */
3216 
3217       rtx op0, op1;
3218 
3219       icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3220 			    unsignedp);
3221 
3222       op0 = parmreg;
3223       op1 = validated_mem;
3224       if (icode != CODE_FOR_nothing
3225 	  && insn_operand_matches (icode, 0, op0)
3226 	  && insn_operand_matches (icode, 1, op1))
3227 	{
3228 	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3229 	  rtx_insn *insn, *insns;
3230 	  rtx t = op1;
3231 	  HARD_REG_SET hardregs;
3232 
3233 	  start_sequence ();
3234 	  /* If op1 is a hard register that is likely spilled, first
3235 	     force it into a pseudo, otherwise the combiner might extend
3236 	     its lifetime too much.  */
3237 	  if (GET_CODE (t) == SUBREG)
3238 	    t = SUBREG_REG (t);
3239 	  if (REG_P (t)
3240 	      && HARD_REGISTER_P (t)
3241 	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3242 	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3243 	    {
3244 	      t = gen_reg_rtx (GET_MODE (op1));
3245 	      emit_move_insn (t, op1);
3246 	    }
3247 	  else
3248 	    t = op1;
3249 	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3250 					   data->passed_mode, unsignedp);
3251 	  emit_insn (pat);
3252 	  insns = get_insns ();
3253 
3254 	  moved = true;
3255 	  CLEAR_HARD_REG_SET (hardregs);
3256 	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3257 	    {
3258 	      if (INSN_P (insn))
3259 		note_stores (insn, record_hard_reg_sets, &hardregs);
3260 	      if (!hard_reg_set_empty_p (hardregs))
3261 		moved = false;
3262 	    }
3263 
3264 	  end_sequence ();
3265 
3266 	  if (moved)
3267 	    {
3268 	      emit_insn (insns);
3269 	      if (equiv_stack_parm != NULL_RTX)
3270 		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3271 						  equiv_stack_parm);
3272 	    }
3273 	}
3274     }
3275 
3276   if (moved)
3277     /* Nothing to do.  */
3278     ;
3279   else if (need_conversion)
3280     {
3281       /* We did not have an insn to convert directly, or the sequence
3282 	 generated appeared unsafe.  We must first copy the parm to a
3283 	 pseudo reg, and save the conversion until after all
3284 	 parameters have been moved.  */
3285 
3286       int save_tree_used;
3287       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3288 
3289       emit_move_insn (tempreg, validated_mem);
3290 
3291       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3292       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3293 
3294       if (partial_subreg_p (tempreg)
3295 	  && GET_MODE (tempreg) == data->nominal_mode
3296 	  && REG_P (SUBREG_REG (tempreg))
3297 	  && data->nominal_mode == data->passed_mode
3298 	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3299 	{
3300 	  /* The argument is already sign/zero extended, so note it
3301 	     into the subreg.  */
3302 	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3303 	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
3304 	}
3305 
3306       /* TREE_USED gets set erroneously during expand_assignment.  */
3307       save_tree_used = TREE_USED (parm);
3308       SET_DECL_RTL (parm, rtl);
3309       expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3310       SET_DECL_RTL (parm, NULL_RTX);
3311       TREE_USED (parm) = save_tree_used;
3312       all->first_conversion_insn = get_insns ();
3313       all->last_conversion_insn = get_last_insn ();
3314       end_sequence ();
3315 
3316       did_conversion = true;
3317     }
3318   else if (MEM_P (data->entry_parm)
3319 	   && GET_MODE_ALIGNMENT (promoted_nominal_mode)
3320 	      > MEM_ALIGN (data->entry_parm)
3321 	   && (((icode = optab_handler (movmisalign_optab,
3322 					promoted_nominal_mode))
3323 		!= CODE_FOR_nothing)
3324 	       || targetm.slow_unaligned_access (promoted_nominal_mode,
3325 						 MEM_ALIGN (data->entry_parm))))
3326     {
3327       if (icode != CODE_FOR_nothing)
3328 	emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
3329       else
3330 	rtl = parmreg = extract_bit_field (validated_mem,
3331 			GET_MODE_BITSIZE (promoted_nominal_mode), 0,
3332 			unsignedp, parmreg,
3333 			promoted_nominal_mode, VOIDmode, false, NULL);
3334     }
3335   else
3336     emit_move_insn (parmreg, validated_mem);
3337 
3338   /* If we were passed a pointer but the actual value can live in a register,
3339      retrieve it and use it directly.  Note that we cannot use nominal_mode,
3340      because it will have been set to Pmode above; we must use the actual mode
3341      of the parameter instead.  */
3342   if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3343     {
3344       /* Use a stack slot for debugging purposes if possible.  */
3345       if (use_register_for_decl (parm))
3346 	{
3347 	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3348 	  mark_user_reg (parmreg);
3349 	}
3350       else
3351 	{
3352 	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3353 					    TYPE_MODE (TREE_TYPE (parm)),
3354 					    TYPE_ALIGN (TREE_TYPE (parm)));
3355 	  parmreg
3356 	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3357 				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3358 				  align);
3359 	  set_mem_attributes (parmreg, parm, 1);
3360 	}
3361 
3362       /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3363 	 the debug info in case it is not legitimate.  */
3364       if (GET_MODE (parmreg) != GET_MODE (rtl))
3365 	{
3366 	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3367 	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3368 
3369 	  push_to_sequence2 (all->first_conversion_insn,
3370 			     all->last_conversion_insn);
3371 	  emit_move_insn (tempreg, rtl);
3372 	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3373 	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3374 			  tempreg);
3375 	  all->first_conversion_insn = get_insns ();
3376 	  all->last_conversion_insn = get_last_insn ();
3377 	  end_sequence ();
3378 
3379 	  did_conversion = true;
3380 	}
3381       else
3382 	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3383 
3384       rtl = parmreg;
3385 
3386       /* STACK_PARM is the pointer, not the parm, and PARMREG is
3387 	 now the parm.  */
3388       data->stack_parm = NULL;
3389     }
3390 
3391   set_parm_rtl (parm, rtl);
3392 
3393   /* Mark the register as eliminable if we did no conversion and it was
3394      copied from memory at a fixed offset, and the arg pointer was not
3395      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
3396      offset formed an invalid address, such memory-equivalences as we
3397      make here would screw up life analysis for it.  */
3398   if (data->nominal_mode == data->passed_mode
3399       && !did_conversion
3400       && data->stack_parm != 0
3401       && MEM_P (data->stack_parm)
3402       && data->locate.offset.var == 0
3403       && reg_mentioned_p (virtual_incoming_args_rtx,
3404 			  XEXP (data->stack_parm, 0)))
3405     {
3406       rtx_insn *linsn = get_last_insn ();
3407       rtx_insn *sinsn;
3408       rtx set;
3409 
3410       /* Mark complex types separately.  */
3411       if (GET_CODE (parmreg) == CONCAT)
3412 	{
3413 	  scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3414 	  int regnor = REGNO (XEXP (parmreg, 0));
3415 	  int regnoi = REGNO (XEXP (parmreg, 1));
3416 	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3417 	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
3418 					  GET_MODE_SIZE (submode));
3419 
3420 	  /* Scan backwards for the set of the real and
3421 	     imaginary parts.  */
3422 	  for (sinsn = linsn; sinsn != 0;
3423 	       sinsn = prev_nonnote_insn (sinsn))
3424 	    {
3425 	      set = single_set (sinsn);
3426 	      if (set == 0)
3427 		continue;
3428 
3429 	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
3430 		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3431 	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
3432 		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3433 	    }
3434 	}
3435       else
3436 	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3437     }
3438 
3439   /* For pointer data type, suggest pointer register.  */
3440   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3441     mark_reg_pointer (parmreg,
3442 		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3443 }
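
/* Illustrative sketch (editorial example, not part of the compiler):
   assume a hypothetical target on which promote_function_mode widens
   HImode parameters to SImode.  For a "short" parameter arriving in an
   HImode hard register, PROMOTED_NOMINAL_MODE is SImode and PARMREG is
   an SImode pseudo, so the direct-extension path above tries to emit
   something like

       (set (reg:SI parmreg) (sign_extend:SI (reg:HI entry_parm)))

   If scanning the generated sequence shows that it sets no hard
   register, the extension is emitted immediately; otherwise the value
   is first copied to a pseudo and the conversion is deferred to the
   shared conversion sequence, after all parameters have been moved.  */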
3444 
3445 /* A subroutine of assign_parms.  Allocate stack space to hold the current
3446    parameter.  Get it there.  Perform all ABI specified conversions.  */
3447 
3448 static void
3449 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3450 		         struct assign_parm_data_one *data)
3451 {
3452   /* Value must be stored in the stack slot STACK_PARM during function
3453      execution.  */
3454   bool to_conversion = false;
3455 
3456   assign_parm_remove_parallels (data);
3457 
3458   if (data->arg.mode != data->nominal_mode)
3459     {
3460       /* Conversion is required.  */
3461       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3462 
3463       emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3464 
3465       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3466       to_conversion = true;
3467 
3468       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3469 					  TYPE_UNSIGNED (TREE_TYPE (parm)));
3470 
3471       if (data->stack_parm)
3472 	{
3473 	  poly_int64 offset
3474 	    = subreg_lowpart_offset (data->nominal_mode,
3475 				     GET_MODE (data->stack_parm));
3476 	  /* ??? This may need a big-endian conversion on sparc64.  */
3477 	  data->stack_parm
3478 	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
3479 	  if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3480 	    set_mem_offset (data->stack_parm,
3481 			    MEM_OFFSET (data->stack_parm) + offset);
3482 	}
3483     }
3484 
3485   if (data->entry_parm != data->stack_parm)
3486     {
3487       rtx src, dest;
3488 
3489       if (data->stack_parm == 0)
3490 	{
3491 	  int align = STACK_SLOT_ALIGNMENT (data->arg.type,
3492 					    GET_MODE (data->entry_parm),
3493 					    TYPE_ALIGN (data->arg.type));
3494 	  if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
3495 	      && ((optab_handler (movmisalign_optab,
3496 				  GET_MODE (data->entry_parm))
3497 		   != CODE_FOR_nothing)
3498 		  || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
3499 						    align)))
3500 	    align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
3501 	  data->stack_parm
3502 	    = assign_stack_local (GET_MODE (data->entry_parm),
3503 				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3504 				  align);
3505 	  align = MEM_ALIGN (data->stack_parm);
3506 	  set_mem_attributes (data->stack_parm, parm, 1);
3507 	  set_mem_align (data->stack_parm, align);
3508 	}
3509 
3510       dest = validize_mem (copy_rtx (data->stack_parm));
3511       src = validize_mem (copy_rtx (data->entry_parm));
3512 
3513       if (TYPE_EMPTY_P (data->arg.type))
3514 	/* Empty types don't really need to be copied.  */;
3515       else if (MEM_P (src))
3516 	{
3517 	  /* Use a block move to handle potentially misaligned entry_parm.  */
3518 	  if (!to_conversion)
3519 	    push_to_sequence2 (all->first_conversion_insn,
3520 			       all->last_conversion_insn);
3521 	  to_conversion = true;
3522 
3523 	  emit_block_move (dest, src,
3524 			   GEN_INT (int_size_in_bytes (data->arg.type)),
3525 			   BLOCK_OP_NORMAL);
3526 	}
3527       else
3528 	{
3529 	  if (!REG_P (src))
3530 	    src = force_reg (GET_MODE (src), src);
3531 	  emit_move_insn (dest, src);
3532 	}
3533     }
3534 
3535   if (to_conversion)
3536     {
3537       all->first_conversion_insn = get_insns ();
3538       all->last_conversion_insn = get_last_insn ();
3539       end_sequence ();
3540     }
3541 
3542   set_parm_rtl (parm, data->stack_parm);
3543 }
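
/* Illustrative sketch (editorial example, not part of the compiler):
   suppose a hypothetical vector argument arrives in a register whose
   mode requires 16-byte alignment, while STACK_SLOT_ALIGNMENT for its
   type only guarantees 8 bytes.  If the target provides a movmisalign
   pattern for that mode, or reports slow unaligned access at 8-byte
   alignment, the code above bumps the requested slot alignment up to
   the full mode alignment before calling assign_stack_local, so the
   later store into the slot neither faults nor takes the slow
   unaligned path.  */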
3544 
3545 /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
3546    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
3547 
3548 static void
3549 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3550 			      vec<tree> fnargs)
3551 {
3552   tree parm;
3553   tree orig_fnargs = all->orig_fnargs;
3554   unsigned i = 0;
3555 
3556   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3557     {
3558       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3559 	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3560 	{
3561 	  rtx tmp, real, imag;
3562 	  scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3563 
3564 	  real = DECL_RTL (fnargs[i]);
3565 	  imag = DECL_RTL (fnargs[i + 1]);
3566 	  if (inner != GET_MODE (real))
3567 	    {
3568 	      real = gen_lowpart_SUBREG (inner, real);
3569 	      imag = gen_lowpart_SUBREG (inner, imag);
3570 	    }
3571 
3572 	  if (TREE_ADDRESSABLE (parm))
3573 	    {
3574 	      rtx rmem, imem;
3575 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3576 	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3577 						DECL_MODE (parm),
3578 						TYPE_ALIGN (TREE_TYPE (parm)));
3579 
3580 	      /* split_complex_arg put the real and imag parts in
3581 		 pseudos.  Move them to memory.  */
3582 	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
3583 	      set_mem_attributes (tmp, parm, 1);
3584 	      rmem = adjust_address_nv (tmp, inner, 0);
3585 	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3586 	      push_to_sequence2 (all->first_conversion_insn,
3587 				 all->last_conversion_insn);
3588 	      emit_move_insn (rmem, real);
3589 	      emit_move_insn (imem, imag);
3590 	      all->first_conversion_insn = get_insns ();
3591 	      all->last_conversion_insn = get_last_insn ();
3592 	      end_sequence ();
3593 	    }
3594 	  else
3595 	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3596 	  set_parm_rtl (parm, tmp);
3597 
3598 	  real = DECL_INCOMING_RTL (fnargs[i]);
3599 	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3600 	  if (inner != GET_MODE (real))
3601 	    {
3602 	      real = gen_lowpart_SUBREG (inner, real);
3603 	      imag = gen_lowpart_SUBREG (inner, imag);
3604 	    }
3605 	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3606 	  set_decl_incoming_rtl (parm, tmp, false);
3607 	  i++;
3608 	}
3609     }
3610 }
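
/* Illustrative sketch (editorial example, not part of the compiler):
   for a parameter declared "_Complex double z" on a target whose
   split_complex_arg hook splits complex arguments, the augmented
   argument list contains two artificial DFmode parameters.  After
   expansion, their DECL_RTL pseudos (REAL and IMAG above) are glued
   back together as

       (concat:DC (reg:DF real) (reg:DF imag))

   unless Z is addressable, in which case both halves are spilled to a
   freshly allocated DCmode stack slot and that MEM becomes Z's RTL.  */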
3611 
3612 /* Assign RTL expressions to the function's parameters.  This may involve
3613    copying them into registers and using those registers as the DECL_RTL.  */
3614 
3615 static void
3616 assign_parms (tree fndecl)
3617 {
3618   struct assign_parm_data_all all;
3619   tree parm;
3620   vec<tree> fnargs;
3621   unsigned i;
3622 
3623   crtl->args.internal_arg_pointer
3624     = targetm.calls.internal_arg_pointer ();
3625 
3626   assign_parms_initialize_all (&all);
3627   fnargs = assign_parms_augmented_arg_list (&all);
3628 
3629   FOR_EACH_VEC_ELT (fnargs, i, parm)
3630     {
3631       struct assign_parm_data_one data;
3632 
3633       /* Extract the type of PARM; adjust it according to ABI.  */
3634       assign_parm_find_data_types (&all, parm, &data);
3635 
3636       /* Early out for errors and void parameters.  */
3637       if (data.passed_mode == VOIDmode)
3638 	{
3639 	  SET_DECL_RTL (parm, const0_rtx);
3640 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3641 	  continue;
3642 	}
3643 
3644       /* Estimate stack alignment from parameter alignment.  */
3645       if (SUPPORTS_STACK_ALIGNMENT)
3646         {
3647           unsigned int align
3648 	    = targetm.calls.function_arg_boundary (data.arg.mode,
3649 						   data.arg.type);
3650 	  align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
3651 	  if (TYPE_ALIGN (data.nominal_type) > align)
3652 	    align = MINIMUM_ALIGNMENT (data.nominal_type,
3653 				       TYPE_MODE (data.nominal_type),
3654 				       TYPE_ALIGN (data.nominal_type));
3655 	  if (crtl->stack_alignment_estimated < align)
3656 	    {
3657 	      gcc_assert (!crtl->stack_realign_processed);
3658 	      crtl->stack_alignment_estimated = align;
3659 	    }
3660 	}
3661 
3662       /* Find out where the parameter arrives in this function.  */
3663       assign_parm_find_entry_rtl (&all, &data);
3664 
3665       /* Find out where stack space for this parameter might be.  */
3666       if (assign_parm_is_stack_parm (&all, &data))
3667 	{
3668 	  assign_parm_find_stack_rtl (parm, &data);
3669 	  assign_parm_adjust_entry_rtl (&data);
3670 	  /* For arguments that occupy no space in the parameter
3671 	     passing area, have non-zero size, and whose address is
3672 	     taken, force creation of a stack slot so that they have
3673 	     a distinct address from other parameters.  */
3674 	  if (TYPE_EMPTY_P (data.arg.type)
3675 	      && TREE_ADDRESSABLE (parm)
3676 	      && data.entry_parm == data.stack_parm
3677 	      && MEM_P (data.entry_parm)
3678 	      && int_size_in_bytes (data.arg.type))
3679 	    data.stack_parm = NULL_RTX;
3680 	}
3681       /* Record permanently how this parm was passed.  */
3682       if (data.arg.pass_by_reference)
3683 	{
3684 	  rtx incoming_rtl
3685 	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
3686 			   data.entry_parm);
3687 	  set_decl_incoming_rtl (parm, incoming_rtl, true);
3688 	}
3689       else
3690 	set_decl_incoming_rtl (parm, data.entry_parm, false);
3691 
3692       assign_parm_adjust_stack_rtl (&data);
3693 
3694       if (assign_parm_setup_block_p (&data))
3695 	assign_parm_setup_block (&all, parm, &data);
3696       else if (data.arg.pass_by_reference || use_register_for_decl (parm))
3697 	assign_parm_setup_reg (&all, parm, &data);
3698       else
3699 	assign_parm_setup_stack (&all, parm, &data);
3700 
3701       if (cfun->stdarg && !DECL_CHAIN (parm))
3702 	assign_parms_setup_varargs (&all, &data, false);
3703 
3704       /* Update info on where next arg arrives in registers.  */
3705       targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3706     }
3707 
3708   if (targetm.calls.split_complex_arg)
3709     assign_parms_unsplit_complex (&all, fnargs);
3710 
3711   fnargs.release ();
3712 
3713   /* Output all parameter conversion instructions (possibly including calls)
3714      now that all parameters have been copied out of hard registers.  */
3715   emit_insn (all.first_conversion_insn);
3716 
3717   /* Estimate reload stack alignment from scalar return mode.  */
3718   if (SUPPORTS_STACK_ALIGNMENT)
3719     {
3720       if (DECL_RESULT (fndecl))
3721 	{
3722 	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
3723 	  machine_mode mode = TYPE_MODE (type);
3724 
3725 	  if (mode != BLKmode
3726 	      && mode != VOIDmode
3727 	      && !AGGREGATE_TYPE_P (type))
3728 	    {
3729 	      unsigned int align = GET_MODE_ALIGNMENT (mode);
3730 	      if (crtl->stack_alignment_estimated < align)
3731 		{
3732 		  gcc_assert (!crtl->stack_realign_processed);
3733 		  crtl->stack_alignment_estimated = align;
3734 		}
3735 	    }
3736 	}
3737     }
3738 
3739   /* If we are receiving a struct value address as the first argument, set up
3740      the RTL for the function result. As this might require code to convert
3741      the transmitted address to Pmode, we do this here to ensure that possible
3742      preliminary conversions of the address have been emitted already.  */
3743   if (all.function_result_decl)
3744     {
3745       tree result = DECL_RESULT (current_function_decl);
3746       rtx addr = DECL_RTL (all.function_result_decl);
3747       rtx x;
3748 
3749       if (DECL_BY_REFERENCE (result))
3750 	{
3751 	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3752 	  x = addr;
3753 	}
3754       else
3755 	{
3756 	  SET_DECL_VALUE_EXPR (result,
3757 			       build1 (INDIRECT_REF, TREE_TYPE (result),
3758 				       all.function_result_decl));
3759 	  addr = convert_memory_address (Pmode, addr);
3760 	  x = gen_rtx_MEM (DECL_MODE (result), addr);
3761 	  set_mem_attributes (x, result, 1);
3762 	}
3763 
3764       DECL_HAS_VALUE_EXPR_P (result) = 1;
3765 
3766       set_parm_rtl (result, x);
3767     }
3768 
3769   /* We have aligned all the args, so add space for the pretend args.  */
3770   crtl->args.pretend_args_size = all.pretend_args_size;
3771   all.stack_args_size.constant += all.extra_pretend_bytes;
3772   crtl->args.size = all.stack_args_size.constant;
3773 
3774   /* Adjust function incoming argument size for alignment and
3775      minimum length.  */
3776 
3777   crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3778   crtl->args.size = aligned_upper_bound (crtl->args.size,
3779 					 PARM_BOUNDARY / BITS_PER_UNIT);
3780 
3781   if (ARGS_GROW_DOWNWARD)
3782     {
3783       crtl->args.arg_offset_rtx
3784 	= (all.stack_args_size.var == 0
3785 	   ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3786 	   : expand_expr (size_diffop (all.stack_args_size.var,
3787 				       size_int (-all.stack_args_size.constant)),
3788 			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
3789     }
3790   else
3791     crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3792 
3793   /* See how many bytes, if any, of its args a function should try to pop
3794      on return.  */
3795 
3796   crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3797 							 TREE_TYPE (fndecl),
3798 							 crtl->args.size);
3799 
3800   /* For a stdarg.h function, save info about
3801      regs and stack space used by the named args.  */
3802 
3803   crtl->args.info = all.args_so_far_v;
3804 
3805   /* Set the rtx used for the function return value.  Put this in its
3806      own variable so any optimizers that need this information don't have
3807      to include tree.h.  Do this here so it gets done when an inlined
3808      function gets output.  */
3809 
3810   crtl->return_rtx
3811     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3812        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3813 
3814   /* If scalar return value was computed in a pseudo-reg, or was a named
3815      return value that got dumped to the stack, copy that to the hard
3816      return register.  */
3817   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3818     {
3819       tree decl_result = DECL_RESULT (fndecl);
3820       rtx decl_rtl = DECL_RTL (decl_result);
3821 
3822       if (REG_P (decl_rtl)
3823 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3824 	  : DECL_REGISTER (decl_result))
3825 	{
3826 	  rtx real_decl_rtl;
3827 
3828 	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3829 							fndecl, true);
3830 	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3831 	  /* The delay slot scheduler assumes that crtl->return_rtx
3832 	     holds the hard register containing the return value, not a
3833 	     temporary pseudo.  */
3834 	  crtl->return_rtx = real_decl_rtl;
3835 	}
3836     }
3837 }
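
/* Illustrative sketch (editorial example, not part of the compiler):
   the final size bookkeeping above rounds the incoming-argument area
   up to the parameter boundary.  With hypothetical numbers, 13 bytes
   of named stack arguments, no reserved register-parameter stack
   space, and PARM_BOUNDARY == 64:

       crtl->args.size = aligned_upper_bound (13, 64 / 8) = 16.  */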
3838 
3839 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3840    For all seen types, gimplify their sizes.  */
3841 
3842 static tree
3843 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3844 {
3845   tree t = *tp;
3846 
3847   *walk_subtrees = 0;
3848   if (TYPE_P (t))
3849     {
3850       if (POINTER_TYPE_P (t))
3851 	*walk_subtrees = 1;
3852       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3853 	       && !TYPE_SIZES_GIMPLIFIED (t))
3854 	{
3855 	  gimplify_type_sizes (t, (gimple_seq *) data);
3856 	  *walk_subtrees = 1;
3857 	}
3858     }
3859 
3860   return NULL;
3861 }
3862 
3863 /* Gimplify the parameter list for current_function_decl.  This involves
3864    evaluating SAVE_EXPRs of variable sized parameters and generating code
3865    to implement callee-copies reference parameters.  Returns a sequence of
3866    statements to add to the beginning of the function.  */
3867 
3868 gimple_seq
3869 gimplify_parameters (gimple_seq *cleanup)
3870 {
3871   struct assign_parm_data_all all;
3872   tree parm;
3873   gimple_seq stmts = NULL;
3874   vec<tree> fnargs;
3875   unsigned i;
3876 
3877   assign_parms_initialize_all (&all);
3878   fnargs = assign_parms_augmented_arg_list (&all);
3879 
3880   FOR_EACH_VEC_ELT (fnargs, i, parm)
3881     {
3882       struct assign_parm_data_one data;
3883 
3884       /* Extract the type of PARM; adjust it according to ABI.  */
3885       assign_parm_find_data_types (&all, parm, &data);
3886 
3887       /* Early out for errors and void parameters.  */
3888       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3889 	continue;
3890 
3891       /* Update info on where next arg arrives in registers.  */
3892       targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3893 
3894       /* ??? Once upon a time variable_size stuffed parameter list
3895 	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
3896 	 turned out to be less than manageable in the gimple world.
3897 	 Now we have to hunt them down ourselves.  */
3898       walk_tree_without_duplicates (&data.arg.type,
3899 				    gimplify_parm_type, &stmts);
3900 
3901       if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3902 	{
3903 	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3904 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3905 	}
3906 
3907       if (data.arg.pass_by_reference)
3908 	{
3909 	  tree type = TREE_TYPE (data.arg.type);
3910 	  function_arg_info orig_arg (type, data.arg.named);
3911 	  if (reference_callee_copied (&all.args_so_far_v, orig_arg))
3912 	    {
3913 	      tree local, t;
3914 
3915 	      /* For constant-sized objects, this is trivial; for
3916 		 variable-sized objects, we have to play games.  */
3917 	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3918 		  && !(flag_stack_check == GENERIC_STACK_CHECK
3919 		       && compare_tree_int (DECL_SIZE_UNIT (parm),
3920 					    STACK_CHECK_MAX_VAR_SIZE) > 0))
3921 		{
3922 		  local = create_tmp_var (type, get_name (parm));
3923 		  DECL_IGNORED_P (local) = 0;
3924 		  /* If PARM was addressable, move that flag over
3925 		     to the local copy, as its address will be taken,
3926 		     not the PARM's.  Keep the PARM itself marked addressable
3927 		     as well, since we'll query that flag during gimplification.  */
3928 		  if (TREE_ADDRESSABLE (parm))
3929 		    TREE_ADDRESSABLE (local) = 1;
3930 		  if (DECL_NOT_GIMPLE_REG_P (parm))
3931 		    DECL_NOT_GIMPLE_REG_P (local) = 1;
3932 
3933 		  if (!is_gimple_reg (local)
3934 		      && flag_stack_reuse != SR_NONE)
3935 		    {
3936 		      tree clobber = build_clobber (type);
3937 		      gimple *clobber_stmt;
3938 		      clobber_stmt = gimple_build_assign (local, clobber);
3939 		      gimple_seq_add_stmt (cleanup, clobber_stmt);
3940 		    }
3941 		}
3942 	      else
3943 		{
3944 		  tree ptr_type, addr;
3945 
3946 		  ptr_type = build_pointer_type (type);
3947 		  addr = create_tmp_reg (ptr_type, get_name (parm));
3948 		  DECL_IGNORED_P (addr) = 0;
3949 		  local = build_fold_indirect_ref (addr);
3950 
3951 		  t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
3952 					      DECL_ALIGN (parm),
3953 					      max_int_size_in_bytes (type));
3954 		  /* The call has been built for a variable-sized object.  */
3955 		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
3956 		  t = fold_convert (ptr_type, t);
3957 		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3958 		  gimplify_and_add (t, &stmts);
3959 		}
3960 
3961 	      gimplify_assign (local, parm, &stmts);
3962 
3963 	      SET_DECL_VALUE_EXPR (parm, local);
3964 	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
3965 	    }
3966 	}
3967     }
3968 
3969   fnargs.release ();
3970 
3971   return stmts;
3972 }
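
/* Illustrative sketch (editorial example, not part of the compiler):
   on an ABI where reference_callee_copied is true, a variable-sized
   parameter P ends up with gimplified statements roughly like

       addr.N = __builtin_alloca_with_align (SIZE_UNIT, ALIGN, MAXSIZE);
       *addr.N = P;

   and DECL_VALUE_EXPR (P) is set to *addr.N, so later references to P
   inside the function body use the callee's private copy.  The exact
   temporaries and the alloca variant are whatever
   build_alloca_call_expr produces for the target.  */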
3973 
3974 /* Compute the size and offset from the start of the stacked arguments for a
3975    parm passed in mode PASSED_MODE and with type TYPE.
3976 
3977    INITIAL_OFFSET_PTR points to the current offset into the stacked
3978    arguments.
3979 
3980    The starting offset and size for this parm are returned in
3981    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
3982    nonzero, the offset is that of stack slot, which is returned in
3983    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
3984    padding required from the initial offset ptr to the stack slot.
3985 
3986    IN_REGS is nonzero if the argument will be passed in registers.  It will
3987    never be set if REG_PARM_STACK_SPACE is not defined.
3988 
3989    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3990    for arguments which are passed in registers.
3991 
3992    FNDECL is the function in which the argument was defined.
3993 
3994    There are two types of rounding that are done.  The first, controlled by
3995    TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3996    argument list to be aligned to the specific boundary (in bits).  This
3997    rounding affects the initial and starting offsets, but not the argument
3998    size.
3999 
4000    The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4001    optionally rounds the size of the parm to PARM_BOUNDARY.  The
4002    initial offset is not affected by this rounding, while the size always
4003    is and the starting offset may be.  */
4004 
4005 /*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
4006     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4007     callers pass in the total size of args so far as
4008     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
4009 
4010 void
4011 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4012 		     int reg_parm_stack_space, int partial,
4013 		     tree fndecl ATTRIBUTE_UNUSED,
4014 		     struct args_size *initial_offset_ptr,
4015 		     struct locate_and_pad_arg_data *locate)
4016 {
4017   tree sizetree;
4018   pad_direction where_pad;
4019   unsigned int boundary, round_boundary;
4020   int part_size_in_regs;
4021 
4022   /* If we have found a stack parm before we reach the end of the
4023      area reserved for registers, skip that area.  */
4024   if (! in_regs)
4025     {
4026       if (reg_parm_stack_space > 0)
4027 	{
4028 	  if (initial_offset_ptr->var
4029 	      || !ordered_p (initial_offset_ptr->constant,
4030 			     reg_parm_stack_space))
4031 	    {
4032 	      initial_offset_ptr->var
4033 		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4034 			      ssize_int (reg_parm_stack_space));
4035 	      initial_offset_ptr->constant = 0;
4036 	    }
4037 	  else
4038 	    initial_offset_ptr->constant
4039 	      = ordered_max (initial_offset_ptr->constant,
4040 			     reg_parm_stack_space);
4041 	}
4042     }
4043 
4044   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4045 
4046   sizetree = (type
4047 	      ? arg_size_in_bytes (type)
4048 	      : size_int (GET_MODE_SIZE (passed_mode)));
4049   where_pad = targetm.calls.function_arg_padding (passed_mode, type);
4050   boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4051   round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4052 							      type);
4053   locate->where_pad = where_pad;
4054 
4055   /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
4056   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4057     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4058 
4059   locate->boundary = boundary;
4060 
4061   if (SUPPORTS_STACK_ALIGNMENT)
4062     {
4063       /* stack_alignment_estimated can't change after stack has been
4064 	 realigned.  */
4065       if (crtl->stack_alignment_estimated < boundary)
4066         {
4067           if (!crtl->stack_realign_processed)
4068 	    crtl->stack_alignment_estimated = boundary;
4069 	  else
4070 	    {
4071 	      /* If stack is realigned and stack alignment value
4072 		 hasn't been finalized, it is OK not to increase
4073 		 stack_alignment_estimated.  The bigger alignment
4074 		 requirement is recorded in stack_alignment_needed
4075 		 below.  */
4076 	      gcc_assert (!crtl->stack_realign_finalized
4077 			  && crtl->stack_realign_needed);
4078 	    }
4079 	}
4080     }
4081 
4082   if (ARGS_GROW_DOWNWARD)
4083     {
4084       locate->slot_offset.constant = -initial_offset_ptr->constant;
4085       if (initial_offset_ptr->var)
4086 	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4087 					      initial_offset_ptr->var);
4088 
4089       {
4090 	tree s2 = sizetree;
4091 	if (where_pad != PAD_NONE
4092 	    && (!tree_fits_uhwi_p (sizetree)
4093 		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4094 	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4095 	SUB_PARM_SIZE (locate->slot_offset, s2);
4096       }
4097 
4098       locate->slot_offset.constant += part_size_in_regs;
4099 
4100       if (!in_regs || reg_parm_stack_space > 0)
4101 	pad_to_arg_alignment (&locate->slot_offset, boundary,
4102 			      &locate->alignment_pad);
4103 
4104       locate->size.constant = (-initial_offset_ptr->constant
4105 			       - locate->slot_offset.constant);
4106       if (initial_offset_ptr->var)
4107 	locate->size.var = size_binop (MINUS_EXPR,
4108 				       size_binop (MINUS_EXPR,
4109 						   ssize_int (0),
4110 						   initial_offset_ptr->var),
4111 				       locate->slot_offset.var);
4112 
4113       /* Pad_below needs the pre-rounded size to know how much to pad
4114 	 below.  */
4115       locate->offset = locate->slot_offset;
4116       if (where_pad == PAD_DOWNWARD)
4117 	pad_below (&locate->offset, passed_mode, sizetree);
4118 
4119     }
4120   else
4121     {
4122       if (!in_regs || reg_parm_stack_space > 0)
4123 	pad_to_arg_alignment (initial_offset_ptr, boundary,
4124 			      &locate->alignment_pad);
4125       locate->slot_offset = *initial_offset_ptr;
4126 
4127 #ifdef PUSH_ROUNDING
4128       if (passed_mode != BLKmode)
4129 	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4130 #endif
4131 
4132       /* Pad_below needs the pre-rounded size to know how much to pad below
4133 	 so this must be done before rounding up.  */
4134       locate->offset = locate->slot_offset;
4135       if (where_pad == PAD_DOWNWARD)
4136 	pad_below (&locate->offset, passed_mode, sizetree);
4137 
4138       if (where_pad != PAD_NONE
4139 	  && (!tree_fits_uhwi_p (sizetree)
4140 	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4141 	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4142 
4143       ADD_PARM_SIZE (locate->size, sizetree);
4144 
4145       locate->size.constant -= part_size_in_regs;
4146     }
4147 
4148   locate->offset.constant
4149     += targetm.calls.function_arg_offset (passed_mode, type);
4150 }
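
/* Illustrative sketch (editorial example, not part of the compiler):
   a worked example with hypothetical values, for a stack-passed SImode
   argument, ARGS_GROW_DOWNWARD false, STACK_POINTER_OFFSET 0, no
   reserved register-parameter area, BOUNDARY == 64 bits, and an
   initial offset of 4 bytes:

       pad_to_arg_alignment rounds the offset 4 up to 8;
       locate->slot_offset = 8 and locate->offset = 8 (PAD_UPWARD,
       so pad_below adds nothing);
       locate->size = 4, or 8 if the target rounds argument sizes
       up to its round_boundary.  */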
4151 
4152 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4153    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
4154 
4155 static void
4156 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4157 		      struct args_size *alignment_pad)
4158 {
4159   tree save_var = NULL_TREE;
4160   poly_int64 save_constant = 0;
4161   int boundary_in_bytes = boundary / BITS_PER_UNIT;
4162   poly_int64 sp_offset = STACK_POINTER_OFFSET;
4163 
4164 #ifdef SPARC_STACK_BOUNDARY_HACK
4165   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4166      the real alignment of %sp.  However, when it does this, the
4167      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
4168   if (SPARC_STACK_BOUNDARY_HACK)
4169     sp_offset = 0;
4170 #endif
4171 
4172   if (boundary > PARM_BOUNDARY)
4173     {
4174       save_var = offset_ptr->var;
4175       save_constant = offset_ptr->constant;
4176     }
4177 
4178   alignment_pad->var = NULL_TREE;
4179   alignment_pad->constant = 0;
4180 
4181   if (boundary > BITS_PER_UNIT)
4182     {
4183       int misalign;
4184       if (offset_ptr->var
4185 	  || !known_misalignment (offset_ptr->constant + sp_offset,
4186 				  boundary_in_bytes, &misalign))
4187 	{
4188 	  tree sp_offset_tree = ssize_int (sp_offset);
4189 	  tree offset = size_binop (PLUS_EXPR,
4190 				    ARGS_SIZE_TREE (*offset_ptr),
4191 				    sp_offset_tree);
4192 	  tree rounded;
4193 	  if (ARGS_GROW_DOWNWARD)
4194 	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
4195 	  else
4196 	    rounded = round_up   (offset, boundary / BITS_PER_UNIT);
4197 
4198 	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4199 	  /* ARGS_SIZE_TREE includes constant term.  */
4200 	  offset_ptr->constant = 0;
4201 	  if (boundary > PARM_BOUNDARY)
4202 	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4203 					     save_var);
4204 	}
4205       else
4206 	{
4207 	  if (ARGS_GROW_DOWNWARD)
4208 	    offset_ptr->constant -= misalign;
4209 	  else
4210 	    offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4211 
4212 	  if (boundary > PARM_BOUNDARY)
4213 	    alignment_pad->constant = offset_ptr->constant - save_constant;
4214 	}
4215     }
4216 }
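
/* Illustrative sketch (editorial example, not part of the compiler):
   with a constant offset of 20, STACK_POINTER_OFFSET == 0 and
   BOUNDARY == 64 bits (8 bytes), the misalignment is 20 % 8 == 4, so

       args grow upward:    20 + (-4 & 7) = 24
       args grow downward:  20 - 4        = 16

   i.e. the offset is pushed to the next (respectively previous)
   multiple of the boundary, and the adjustment is recorded in
   ALIGNMENT_PAD when BOUNDARY exceeds PARM_BOUNDARY.  */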
4217 
4218 static void
4219 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4220 {
4221   unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4222   int misalign;
4223   if (passed_mode != BLKmode
4224       && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4225     offset_ptr->constant += -misalign & (align - 1);
4226   else
4227     {
4228       if (TREE_CODE (sizetree) != INTEGER_CST
4229 	  || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4230 	{
4231 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
4232 	  tree s2 = round_up (sizetree, align);
4233 	  /* Add it in.  */
4234 	  ADD_PARM_SIZE (*offset_ptr, s2);
4235 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
4236 	}
4237     }
4238 }
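
/* Illustrative sketch (editorial example, not part of the compiler):
   with PARM_BOUNDARY == 32 (a 4-byte unit) and an HImode argument of
   size 2, pad_below adds (-2 & 3) == 2 bytes to the offset, so the
   value starts 2 bytes into its 4-byte slot with the padding below it.  */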
4239 
4240 
4241 /* True if register REGNO was alive at a place where `setjmp' was
4242    called and was set more than once or is an argument.  Such regs may
4243    be clobbered by `longjmp'.  */
4244 
4245 static bool
4246 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4247 {
4248   /* There appear to be cases where some local vars never reach the
4249      backend but have bogus regnos.  */
4250   if (regno >= max_reg_num ())
4251     return false;
4252 
4253   return ((REG_N_SETS (regno) > 1
4254 	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4255 			       regno))
4256 	  && REGNO_REG_SET_P (setjmp_crosses, regno));
4257 }
4258 
4259 /* Walk the tree of blocks describing the binding levels within a
4260    function and warn about variables that might be killed by setjmp or
4261    vfork.  This is done after flow analysis and before register
4262    allocation, since register allocation replaces the pseudo-regs
4263    with hard regs.  */
4264 
4265 static void
4266 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4267 {
4268   tree decl, sub;
4269 
4270   for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4271     {
4272       if (VAR_P (decl)
4273 	  && DECL_RTL_SET_P (decl)
4274 	  && REG_P (DECL_RTL (decl))
4275 	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4276 	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4277                  " %<longjmp%> or %<vfork%>", decl);
4278     }
4279 
4280   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4281     setjmp_vars_warning (setjmp_crosses, sub);
4282 }
4283 
4284 /* Do the appropriate part of setjmp_vars_warning
4285    but for arguments instead of local variables.  */
4286 
4287 static void
4288 setjmp_args_warning (bitmap setjmp_crosses)
4289 {
4290   tree decl;
4291   for (decl = DECL_ARGUMENTS (current_function_decl);
4292        decl; decl = DECL_CHAIN (decl))
4293     if (DECL_RTL (decl) != 0
4294 	&& REG_P (DECL_RTL (decl))
4295 	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4296       warning (OPT_Wclobbered,
4297                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4298 	       decl);
4299 }
4300 
4301 /* Generate warning messages for variables live across setjmp.  */
4302 
4303 void
4304 generate_setjmp_warnings (void)
4305 {
4306   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4307 
4308   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4309       || bitmap_empty_p (setjmp_crosses))
4310     return;
4311 
4312   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4313   setjmp_args_warning (setjmp_crosses);
4314 }
4315 
4316 
4317 /* Reverse the order of elements in the fragment chain T of blocks,
4318    and return the new head of the chain (old last element).
4319    In addition to that clear BLOCK_SAME_RANGE flags when needed
4320    and adjust BLOCK_SUPERCONTEXT from the super fragment to
4321    its super fragment origin.  */
4322 
4323 static tree
4324 block_fragments_nreverse (tree t)
4325 {
4326   tree prev = 0, block, next, prev_super = 0;
4327   tree super = BLOCK_SUPERCONTEXT (t);
4328   if (BLOCK_FRAGMENT_ORIGIN (super))
4329     super = BLOCK_FRAGMENT_ORIGIN (super);
4330   for (block = t; block; block = next)
4331     {
4332       next = BLOCK_FRAGMENT_CHAIN (block);
4333       BLOCK_FRAGMENT_CHAIN (block) = prev;
4334       if ((prev && !BLOCK_SAME_RANGE (prev))
4335 	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4336 	      != prev_super))
4337 	BLOCK_SAME_RANGE (block) = 0;
4338       prev_super = BLOCK_SUPERCONTEXT (block);
4339       BLOCK_SUPERCONTEXT (block) = super;
4340       prev = block;
4341     }
4342   t = BLOCK_FRAGMENT_ORIGIN (t);
4343   if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4344       != prev_super)
4345     BLOCK_SAME_RANGE (t) = 0;
4346   BLOCK_SUPERCONTEXT (t) = super;
4347   return prev;
4348 }
4349 
4350 /* Reverse the order of elements in the chain T of blocks,
4351    and return the new head of the chain (old last element).
4352    Also do the same on subblocks and reverse the order of elements
4353    in BLOCK_FRAGMENT_CHAIN as well.  */
4354 
4355 static tree
4356 blocks_nreverse_all (tree t)
4357 {
4358   tree prev = 0, block, next;
4359   for (block = t; block; block = next)
4360     {
4361       next = BLOCK_CHAIN (block);
4362       BLOCK_CHAIN (block) = prev;
4363       if (BLOCK_FRAGMENT_CHAIN (block)
4364 	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4365 	{
4366 	  BLOCK_FRAGMENT_CHAIN (block)
4367 	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4368 	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4369 	    BLOCK_SAME_RANGE (block) = 0;
4370 	}
4371       BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4372       prev = block;
4373     }
4374   return prev;
4375 }
4376 
4377 
4378 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4379    and create duplicate blocks.  */
4380 /* ??? Need an option to either create block fragments or to create
4381    abstract origin duplicates of a source block.  It really depends
4382    on what optimization has been performed.  */
4383 
4384 void
4385 reorder_blocks (void)
4386 {
4387   tree block = DECL_INITIAL (current_function_decl);
4388 
4389   if (block == NULL_TREE)
4390     return;
4391 
4392   auto_vec<tree, 10> block_stack;
4393 
4394   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
4395   clear_block_marks (block);
4396 
4397   /* Prune the old trees away, so that they don't get in the way.  */
4398   BLOCK_SUBBLOCKS (block) = NULL_TREE;
4399   BLOCK_CHAIN (block) = NULL_TREE;
4400 
4401   /* Recreate the block tree from the note nesting.  */
4402   reorder_blocks_1 (get_insns (), block, &block_stack);
4403   BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4404 }
4405 
4406 /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
4407 
4408 void
4409 clear_block_marks (tree block)
4410 {
4411   while (block)
4412     {
4413       TREE_ASM_WRITTEN (block) = 0;
4414       clear_block_marks (BLOCK_SUBBLOCKS (block));
4415       block = BLOCK_CHAIN (block);
4416     }
4417 }
4418 
4419 static void
4420 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4421 		  vec<tree> *p_block_stack)
4422 {
4423   rtx_insn *insn;
4424   tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4425 
4426   for (insn = insns; insn; insn = NEXT_INSN (insn))
4427     {
4428       if (NOTE_P (insn))
4429 	{
4430 	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4431 	    {
4432 	      tree block = NOTE_BLOCK (insn);
4433 	      tree origin;
4434 
4435 	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4436 	      origin = block;
4437 
4438 	      if (prev_end)
4439 		BLOCK_SAME_RANGE (prev_end) = 0;
4440 	      prev_end = NULL_TREE;
4441 
4442 	      /* If we have seen this block before, that means it now
4443 		 spans multiple address regions.  Create a new fragment.  */
4444 	      if (TREE_ASM_WRITTEN (block))
4445 		{
4446 		  tree new_block = copy_node (block);
4447 
4448 		  BLOCK_SAME_RANGE (new_block) = 0;
4449 		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4450 		  BLOCK_FRAGMENT_CHAIN (new_block)
4451 		    = BLOCK_FRAGMENT_CHAIN (origin);
4452 		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4453 
4454 		  NOTE_BLOCK (insn) = new_block;
4455 		  block = new_block;
4456 		}
4457 
4458 	      if (prev_beg == current_block && prev_beg)
4459 		BLOCK_SAME_RANGE (block) = 1;
4460 
4461 	      prev_beg = origin;
4462 
4463 	      BLOCK_SUBBLOCKS (block) = 0;
4464 	      TREE_ASM_WRITTEN (block) = 1;
4465 	      /* When there's only one block for the entire function,
4466 		 current_block == block and we mustn't do this; it
4467 		 would cause infinite recursion.  */
4468 	      if (block != current_block)
4469 		{
4470 		  tree super;
4471 		  if (block != origin)
4472 		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4473 				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4474 								      (origin))
4475 				   == current_block);
4476 		  if (p_block_stack->is_empty ())
4477 		    super = current_block;
4478 		  else
4479 		    {
4480 		      super = p_block_stack->last ();
4481 		      gcc_assert (super == current_block
4482 				  || BLOCK_FRAGMENT_ORIGIN (super)
4483 				     == current_block);
4484 		    }
4485 		  BLOCK_SUPERCONTEXT (block) = super;
4486 		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4487 		  BLOCK_SUBBLOCKS (current_block) = block;
4488 		  current_block = origin;
4489 		}
4490 	      p_block_stack->safe_push (block);
4491 	    }
4492 	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4493 	    {
4494 	      NOTE_BLOCK (insn) = p_block_stack->pop ();
4495 	      current_block = BLOCK_SUPERCONTEXT (current_block);
4496 	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
4497 		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4498 	      prev_beg = NULL_TREE;
4499 	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4500 			 ? NOTE_BLOCK (insn) : NULL_TREE;
4501 	    }
4502 	}
4503       else
4504 	{
4505 	  prev_beg = NULL_TREE;
4506 	  if (prev_end)
4507 	    BLOCK_SAME_RANGE (prev_end) = 0;
4508 	  prev_end = NULL_TREE;
4509 	}
4510     }
4511 }
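
/* Illustrative sketch (editorial example, not part of the compiler):
   if optimization leaves two disjoint NOTE_INSN_BLOCK_BEG/END pairs
   for the same lexical BLOCK B, the second occurrence gets a
   copy_node fragment B' chained as

       BLOCK_FRAGMENT_CHAIN (B)   == B'
       BLOCK_FRAGMENT_ORIGIN (B') == B

   so the debug-info writers can later emit two address ranges for the
   single source-level scope B.  */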
4512 
4513 /* Reverse the order of elements in the chain T of blocks,
4514    and return the new head of the chain (old last element).  */
4515 
4516 tree
4517 blocks_nreverse (tree t)
4518 {
4519   tree prev = 0, block, next;
4520   for (block = t; block; block = next)
4521     {
4522       next = BLOCK_CHAIN (block);
4523       BLOCK_CHAIN (block) = prev;
4524       prev = block;
4525     }
4526   return prev;
4527 }
4528 
4529 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4530    by modifying the last node in chain 1 to point to chain 2.  */
4531 
4532 tree
4533 block_chainon (tree op1, tree op2)
4534 {
4535   tree t1;
4536 
4537   if (!op1)
4538     return op2;
4539   if (!op2)
4540     return op1;
4541 
4542   for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4543     continue;
4544   BLOCK_CHAIN (t1) = op2;
4545 
4546 #ifdef ENABLE_TREE_CHECKING
4547   {
4548     tree t2;
4549     for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4550       gcc_assert (t2 != t1);
4551   }
4552 #endif
4553 
4554   return op1;
4555 }
4556 
4557 /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
4558    non-NULL, list them all into VECTOR, in a depth-first preorder
4559    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
4560    blocks.  */
4561 
4562 static int
4563 all_blocks (tree block, tree *vector)
4564 {
4565   int n_blocks = 0;
4566 
4567   while (block)
4568     {
4569       TREE_ASM_WRITTEN (block) = 0;
4570 
4571       /* Record this block.  */
4572       if (vector)
4573 	vector[n_blocks] = block;
4574 
4575       ++n_blocks;
4576 
4577       /* Record the subblocks, and their subblocks...  */
4578       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4579 			      vector ? vector + n_blocks : 0);
4580       block = BLOCK_CHAIN (block);
4581     }
4582 
4583   return n_blocks;
4584 }
4585 
4586 /* Return a vector containing all the blocks rooted at BLOCK.  The
4587    number of elements in the vector is stored in N_BLOCKS_P.  The
4588    vector is dynamically allocated; it is the caller's responsibility
4589    to call `free' on the pointer returned.  */
4590 
4591 static tree *
4592 get_block_vector (tree block, int *n_blocks_p)
4593 {
4594   tree *block_vector;
4595 
4596   *n_blocks_p = all_blocks (block, NULL);
4597   block_vector = XNEWVEC (tree, *n_blocks_p);
4598   all_blocks (block, block_vector);
4599 
4600   return block_vector;
4601 }
4602 
4603 static GTY(()) int next_block_index = 2;
4604 
4605 /* Set BLOCK_NUMBER for all the blocks in FN.  */
4606 
4607 void
4608 number_blocks (tree fn)
4609 {
4610   int i;
4611   int n_blocks;
4612   tree *block_vector;
4613 
4614   /* For XCOFF debugging output, we start numbering the blocks
4615      from 1 within each function, rather than keeping a running
4616      count.  */
4617 #if defined (XCOFF_DEBUGGING_INFO)
4618   if (write_symbols == XCOFF_DEBUG)
4619     next_block_index = 1;
4620 #endif
4621 
4622   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4623 
4624   /* The top-level BLOCK isn't numbered at all.  */
4625   for (i = 1; i < n_blocks; ++i)
4626     /* We number the blocks from two.  */
4627     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4628 
4629   free (block_vector);
4630 
4631   return;
4632 }
4633 
4634 /* If VAR is present in a subblock of BLOCK, return the subblock.  */
4635 
4636 DEBUG_FUNCTION tree
4637 debug_find_var_in_block_tree (tree var, tree block)
4638 {
4639   tree t;
4640 
4641   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4642     if (t == var)
4643       return block;
4644 
4645   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4646     {
4647       tree ret = debug_find_var_in_block_tree (var, t);
4648       if (ret)
4649 	return ret;
4650     }
4651 
4652   return NULL_TREE;
4653 }
4654 
4655 /* Keep track of whether we're in a dummy function context.  If we are,
4656    we don't want to invoke the set_current_function hook, because we'll
4657    get into trouble if the hook calls target_reinit () recursively or
4658    when the initial initialization is not yet complete.  */
4659 
4660 static bool in_dummy_function;
4661 
4662 /* Invoke the target hook when setting cfun.  Update the optimization options
4663    if the function uses different options than the default.  */
4664 
4665 static void
4666 invoke_set_current_function_hook (tree fndecl)
4667 {
4668   if (!in_dummy_function)
4669     {
4670       tree opts = ((fndecl)
4671 		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4672 		   : optimization_default_node);
4673 
4674       if (!opts)
4675 	opts = optimization_default_node;
4676 
4677       /* Change optimization options if needed.  */
4678       if (optimization_current_node != opts)
4679 	{
4680 	  optimization_current_node = opts;
4681 	  cl_optimization_restore (&global_options, &global_options_set,
4682 				   TREE_OPTIMIZATION (opts));
4683 	}
4684 
4685       targetm.set_current_function (fndecl);
4686       this_fn_optabs = this_target_optabs;
4687 
4688       /* Initialize global alignment variables after the options have been changed.  */
4689       parse_alignment_opts ();
4690 
4691       if (opts != optimization_default_node)
4692 	{
4693 	  init_tree_optimization_optabs (opts);
4694 	  if (TREE_OPTIMIZATION_OPTABS (opts))
4695 	    this_fn_optabs = (struct target_optabs *)
4696 	      TREE_OPTIMIZATION_OPTABS (opts);
4697 	}
4698     }
4699 }
4700 
4701 /* cfun should never be set directly; use this function.  */
4702 
4703 void
4704 set_cfun (struct function *new_cfun, bool force)
4705 {
4706   if (cfun != new_cfun || force)
4707     {
4708       cfun = new_cfun;
4709       invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4710       redirect_edge_var_map_empty ();
4711     }
4712 }
4713 
4714 /* Initialized with NOGC, making this poisonous to the garbage collector.  */
4715 
4716 static vec<function *> cfun_stack;
4717 
4718 /* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
4719    current_function_decl accordingly.  */
4720 
4721 void
4722 push_cfun (struct function *new_cfun)
4723 {
4724   gcc_assert ((!cfun && !current_function_decl)
4725 	      || (cfun && current_function_decl == cfun->decl));
4726   cfun_stack.safe_push (cfun);
4727   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4728   set_cfun (new_cfun);
4729 }
4730 
4731 /* Pop cfun from the stack.  Also set current_function_decl accordingly.  */
4732 
4733 void
4734 pop_cfun (void)
4735 {
4736   struct function *new_cfun = cfun_stack.pop ();
4737   /* When in_dummy_function, we do have a cfun but current_function_decl is
4738      NULL.  We also allow pushing NULL cfun and subsequently changing
4739      current_function_decl to something else and have both restored by
4740      pop_cfun.  */
4741   gcc_checking_assert (in_dummy_function
4742 		       || !cfun
4743 		       || current_function_decl == cfun->decl);
4744   set_cfun (new_cfun);
4745   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4746 }
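
/* A minimal usage sketch (not compiled in): how the push_cfun/pop_cfun pair
   above is meant to bracket work on another function.  Only push_cfun and
   pop_cfun are real interfaces from this file; process_one_function is a
   hypothetical stand-in for whatever per-function work the caller does.  */
#if 0
static void
example_with_function_context (struct function *other_fun)
{
  push_cfun (other_fun);		/* cfun and current_function_decl now describe OTHER_FUN.  */
  process_one_function (other_fun);	/* Hypothetical per-function work.  */
  pop_cfun ();				/* Restore the previous cfun, even if it was NULL.  */
}
#endif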
4747 
4748 /* Return the current value of funcdef_no and increment it.  */
4749 int
4750 get_next_funcdef_no (void)
4751 {
4752   return funcdef_no++;
4753 }
4754 
4755 /* Return the current value of funcdef_no.  */
4756 int
4757 get_last_funcdef_no (void)
4758 {
4759   return funcdef_no;
4760 }
4761 
4762 /* Allocate and initialize the stack usage info data structure for the
4763    current function.  */
4764 static void
4765 allocate_stack_usage_info (void)
4766 {
4767   gcc_assert (!cfun->su);
4768   cfun->su = ggc_cleared_alloc<stack_usage> ();
4769   cfun->su->static_stack_size = -1;
4770 }
4771 
4772 /* Allocate a function structure for FNDECL and set its contents
4773    to the defaults.  Set cfun to the newly-allocated object.
4774    Some of the helper functions invoked during initialization assume
4775    that cfun has already been set.  Therefore, assign the new object
4776    directly into cfun and invoke the back end hook explicitly at the
4777    very end, rather than initializing a temporary and calling set_cfun
4778    on it.
4779 
4780    ABSTRACT_P is true if this is a function that will never be seen by
4781    the middle-end.  Such functions are front-end concepts (like C++
4782    function templates) that do not correspond directly to functions
4783    placed in object files.  */
4784 
4785 void
4786 allocate_struct_function (tree fndecl, bool abstract_p)
4787 {
4788   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4789 
4790   cfun = ggc_cleared_alloc<function> ();
4791 
4792   init_eh_for_function ();
4793 
4794   if (init_machine_status)
4795     cfun->machine = (*init_machine_status) ();
4796 
4797 #ifdef OVERRIDE_ABI_FORMAT
4798   OVERRIDE_ABI_FORMAT (fndecl);
4799 #endif
4800 
4801   if (fndecl != NULL_TREE)
4802     {
4803       DECL_STRUCT_FUNCTION (fndecl) = cfun;
4804       cfun->decl = fndecl;
4805       current_function_funcdef_no = get_next_funcdef_no ();
4806     }
4807 
4808   invoke_set_current_function_hook (fndecl);
4809 
4810   if (fndecl != NULL_TREE)
4811     {
4812       tree result = DECL_RESULT (fndecl);
4813 
4814       if (!abstract_p)
4815 	{
4816 	  /* Now that we have activated any function-specific attributes
4817 	     that might affect layout, particularly vector modes, relayout
4818 	     each of the parameters and the result.  */
4819 	  relayout_decl (result);
4820 	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4821 	       parm = DECL_CHAIN (parm))
4822 	    relayout_decl (parm);
4823 
4824 	  /* Similarly relayout the function decl.  */
4825 	  targetm.target_option.relayout_function (fndecl);
4826 	}
4827 
4828       if (!abstract_p && aggregate_value_p (result, fndecl))
4829 	{
4830 #ifdef PCC_STATIC_STRUCT_RETURN
4831 	  cfun->returns_pcc_struct = 1;
4832 #endif
4833 	  cfun->returns_struct = 1;
4834 	}
4835 
4836       cfun->stdarg = stdarg_p (fntype);
4837 
4838       /* Assume all registers in stdarg functions need to be saved.  */
4839       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4840       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4841 
4842       /* ??? This could be set on a per-function basis by the front-end
4843          but is this worth the hassle?  */
4844       cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4845       cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4846 
4847       if (!profile_flag && !flag_instrument_function_entry_exit)
4848 	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4849 
4850       if (flag_callgraph_info)
4851 	allocate_stack_usage_info ();
4852     }
4853 
4854   /* Don't enable begin stmt markers if var-tracking at assignments is
4855      disabled.  The markers make little sense without the variable
4856      binding annotations among them.  */
4857   cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4858     && MAY_HAVE_DEBUG_MARKER_STMTS;
4859 }
4860 
4861 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4862    instead of just setting it.  */
4863 
4864 void
4865 push_struct_function (tree fndecl)
4866 {
4867   /* When in_dummy_function we might be in the middle of a pop_cfun and
4868      current_function_decl and cfun may not match.  */
4869   gcc_assert (in_dummy_function
4870 	      || (!cfun && !current_function_decl)
4871 	      || (cfun && current_function_decl == cfun->decl));
4872   cfun_stack.safe_push (cfun);
4873   current_function_decl = fndecl;
4874   allocate_struct_function (fndecl, false);
4875 }
4876 
4877 /* Reset crtl and other non-struct-function variables to defaults as
4878    appropriate for emitting rtl at the start of a function.  */
4879 
4880 static void
4881 prepare_function_start (void)
4882 {
4883   gcc_assert (!get_last_insn ());
4884 
4885   if (in_dummy_function)
4886     crtl->abi = &default_function_abi;
4887   else
4888     crtl->abi = &fndecl_abi (cfun->decl).base_abi ();
4889 
4890   init_temp_slots ();
4891   init_emit ();
4892   init_varasm_status ();
4893   init_expr ();
4894   default_rtl_profile ();
4895 
4896   if (flag_stack_usage_info && !flag_callgraph_info)
4897     allocate_stack_usage_info ();
4898 
4899   cse_not_expected = ! optimize;
4900 
4901   /* Caller save not needed yet.  */
4902   caller_save_needed = 0;
4903 
4904   /* We haven't done register allocation yet.  */
4905   reg_renumber = 0;
4906 
4907   /* Indicate that we have not instantiated virtual registers yet.  */
4908   virtuals_instantiated = 0;
4909 
4910   /* Indicate that we want CONCATs now.  */
4911   generating_concat_p = 1;
4912 
4913   /* Indicate we have no need of a frame pointer yet.  */
4914   frame_pointer_needed = 0;
4915 }
4916 
4917 void
4918 push_dummy_function (bool with_decl)
4919 {
4920   tree fn_decl, fn_type, fn_result_decl;
4921 
4922   gcc_assert (!in_dummy_function);
4923   in_dummy_function = true;
4924 
4925   if (with_decl)
4926     {
4927       fn_type = build_function_type_list (void_type_node, NULL_TREE);
4928       fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4929 			    fn_type);
4930       fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4931 					 NULL_TREE, void_type_node);
4932       DECL_RESULT (fn_decl) = fn_result_decl;
4933       DECL_ARTIFICIAL (fn_decl) = 1;
4934       tree fn_name = get_identifier (" ");
4935       SET_DECL_ASSEMBLER_NAME (fn_decl, fn_name);
4936     }
4937   else
4938     fn_decl = NULL_TREE;
4939 
4940   push_struct_function (fn_decl);
4941 }
4942 
4943 /* Initialize the rtl expansion mechanism so that we can do simple things
4944    like generate sequences.  This is used to provide a context during global
4945    initialization of some passes.  You must call expand_dummy_function_end
4946    to exit this context.  */
4947 
4948 void
4949 init_dummy_function_start (void)
4950 {
4951   push_dummy_function (false);
4952   prepare_function_start ();
4953 }
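
/* A minimal usage sketch (not compiled in): the dummy-function context set up
   by init_dummy_function_start must always be closed with
   expand_dummy_function_end.  build_init_sequences below is a hypothetical
   placeholder for whatever RTL the caller wants to generate.  */
#if 0
static void
example_pass_global_init (void)
{
  init_dummy_function_start ();	/* Enter a throwaway cfun so emit/sequence code works.  */
  build_init_sequences ();	/* Hypothetical: generate some cached RTL here.  */
  expand_dummy_function_end ();	/* Required to leave the dummy context.  */
}
#endif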
4954 
4955 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4956    and initialize static variables for generating RTL for the statements
4957    of the function.  */
4958 
4959 void
4960 init_function_start (tree subr)
4961 {
4962   /* Initialize backend, if needed.  */
4963   initialize_rtl ();
4964 
4965   prepare_function_start ();
4966   decide_function_section (subr);
4967 
4968   /* Warn if this value is an aggregate type,
4969      regardless of which calling convention we are using for it.  */
4970   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4971     warning (OPT_Waggregate_return, "function returns an aggregate");
4972 }
4973 
4974 /* Expand code to verify the stack_protect_guard.  This is invoked at
4975    the end of a function to be protected.  */
4976 
4977 void
4978 stack_protect_epilogue (void)
4979 {
4980   tree guard_decl = crtl->stack_protect_guard_decl;
4981   rtx_code_label *label = gen_label_rtx ();
4982   rtx x, y;
4983   rtx_insn *seq = NULL;
4984 
4985   x = expand_normal (crtl->stack_protect_guard);
4986 
4987   if (targetm.have_stack_protect_combined_test () && guard_decl)
4988     {
4989       gcc_assert (DECL_P (guard_decl));
4990       y = DECL_RTL (guard_decl);
4991       /* Allow the target to compute address of Y and compare it with X without
4992 	 leaking Y into a register.  This combined address + compare pattern
4993 	 allows the target to prevent spilling of any intermediate results by
4994 	 splitting it after register allocator.  */
4995       seq = targetm.gen_stack_protect_combined_test (x, y, label);
4996     }
4997   else
4998     {
4999       if (guard_decl)
5000 	y = expand_normal (guard_decl);
5001       else
5002 	y = const0_rtx;
5003 
5004       /* Allow the target to compare Y with X without leaking either into
5005 	 a register.  */
5006       if (targetm.have_stack_protect_test ())
5007 	seq = targetm.gen_stack_protect_test (x, y, label);
5008     }
5009 
5010   if (seq)
5011     emit_insn (seq);
5012   else
5013     emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5014 
5015   /* The noreturn predictor has been moved to the tree level.  The rtl-level
5016      predictors estimate this branch about 20%, which isn't enough to get
5017      things moved out of line.  Since this is the only extant case of adding
5018      a noreturn function at the rtl level, it doesn't seem worth doing anything
5019      except adding the prediction by hand.  */
5020   rtx_insn *tmp = get_last_insn ();
5021   if (JUMP_P (tmp))
5022     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5023 
5024   expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5025   free_temp_slots ();
5026   emit_label (label);
5027 }
5028 
5029 /* Start the RTL for a new function, and set variables used for
5030    emitting RTL.
5031    SUBR is the FUNCTION_DECL node.  */
5034 
5035 void
5036 expand_function_start (tree subr)
5037 {
5038   /* Make sure volatile mem refs aren't considered
5039      valid operands of arithmetic insns.  */
5040   init_recog_no_volatile ();
5041 
5042   crtl->profile
5043     = (profile_flag
5044        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5045 
5046   crtl->limit_stack
5047     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5048 
5049   /* Make the label for return statements to jump to.  Do not special
5050      case machines with special return instructions -- they will be
5051      handled later during jump, ifcvt, or epilogue creation.  */
5052   return_label = gen_label_rtx ();
5053 
5054   /* Initialize rtx used to return the value.  */
5055   /* Do this before assign_parms so that we copy the struct value address
5056      before any library calls that assign parms might generate.  */
5057 
5058   /* Decide whether to return the value in memory or in a register.  */
5059   tree res = DECL_RESULT (subr);
5060   if (aggregate_value_p (res, subr))
5061     {
5062       /* Returning something that won't go in a register.  */
5063       rtx value_address = 0;
5064 
5065 #ifdef PCC_STATIC_STRUCT_RETURN
5066       if (cfun->returns_pcc_struct)
5067 	{
5068 	  int size = int_size_in_bytes (TREE_TYPE (res));
5069 	  value_address = assemble_static_space (size);
5070 	}
5071       else
5072 #endif
5073 	{
5074 	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5075 	  /* Expect to be passed the address of a place to store the value.
5076 	     If it is passed as an argument, assign_parms will take care of
5077 	     it.  */
5078 	  if (sv)
5079 	    {
5080 	      value_address = gen_reg_rtx (Pmode);
5081 	      emit_move_insn (value_address, sv);
5082 	    }
5083 	}
5084       if (value_address)
5085 	{
5086 	  rtx x = value_address;
5087 	  if (!DECL_BY_REFERENCE (res))
5088 	    {
5089 	      x = gen_rtx_MEM (DECL_MODE (res), x);
5090 	      set_mem_attributes (x, res, 1);
5091 	    }
5092 	  set_parm_rtl (res, x);
5093 	}
5094     }
5095   else if (DECL_MODE (res) == VOIDmode)
5096     /* If return mode is void, this decl rtl should not be used.  */
5097     set_parm_rtl (res, NULL_RTX);
5098   else
5099     {
5100       /* Compute the return values into a pseudo reg, which we will copy
5101 	 into the true return register after the cleanups are done.  */
5102       tree return_type = TREE_TYPE (res);
5103 
5104       /* If we may coalesce this result, make sure it has the expected mode
5105 	 in case it was promoted.  But we need not bother about BLKmode.  */
5106       machine_mode promoted_mode
5107 	= flag_tree_coalesce_vars && is_gimple_reg (res)
5108 	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5109 	  : BLKmode;
5110 
5111       if (promoted_mode != BLKmode)
5112 	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5113       else if (TYPE_MODE (return_type) != BLKmode
5114 	       && targetm.calls.return_in_msb (return_type))
5115 	/* expand_function_end will insert the appropriate padding in
5116 	   this case.  Use the return value's natural (unpadded) mode
5117 	   within the function proper.  */
5118 	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5119       else
5120 	{
5121 	  /* In order to figure out what mode to use for the pseudo, we
5122 	     figure out what the mode of the eventual return register will
5123 	     actually be, and use that.  */
5124 	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5125 
5126 	  /* Structures that are returned in registers are not
5127 	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
5128 	  if (REG_P (hard_reg))
5129 	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5130 	  else
5131 	    {
5132 	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5133 	      set_parm_rtl (res, gen_group_rtx (hard_reg));
5134 	    }
5135 	}
5136 
5137       /* Set DECL_REGISTER flag so that expand_function_end will copy the
5138 	 result to the real return register(s).  */
5139       DECL_REGISTER (res) = 1;
5140     }
5141 
5142   /* Initialize rtx for parameters and local variables.
5143      In some cases this requires emitting insns.  */
5144   assign_parms (subr);
5145 
5146   /* If function gets a static chain arg, store it.  */
5147   if (cfun->static_chain_decl)
5148     {
5149       tree parm = cfun->static_chain_decl;
5150       rtx local, chain;
5151       rtx_insn *insn;
5152       int unsignedp;
5153 
5154       local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5155       chain = targetm.calls.static_chain (current_function_decl, true);
5156 
5157       set_decl_incoming_rtl (parm, chain, false);
5158       set_parm_rtl (parm, local);
5159       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5160 
5161       if (GET_MODE (local) != GET_MODE (chain))
5162 	{
5163 	  convert_move (local, chain, unsignedp);
5164 	  insn = get_last_insn ();
5165 	}
5166       else
5167 	insn = emit_move_insn (local, chain);
5168 
5169       /* Mark the register as eliminable, similar to parameters.  */
5170       if (MEM_P (chain)
5171 	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5172 	set_dst_reg_note (insn, REG_EQUIV, chain, local);
5173 
5174       /* If we aren't optimizing, save the static chain onto the stack.  */
5175       if (!optimize)
5176 	{
5177 	  tree saved_static_chain_decl
5178 	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5179 			  DECL_NAME (parm), TREE_TYPE (parm));
5180 	  rtx saved_static_chain_rtx
5181 	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5182 	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5183 	  emit_move_insn (saved_static_chain_rtx, chain);
5184 	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5185 	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
5186 	}
5187     }
5188 
5189   /* The following was moved from init_function_start.
5190      The move was supposed to make sdb output more accurate.  */
5191   /* Indicate the beginning of the function body,
5192      as opposed to parm setup.  */
5193   emit_note (NOTE_INSN_FUNCTION_BEG);
5194 
5195   gcc_assert (NOTE_P (get_last_insn ()));
5196 
5197   parm_birth_insn = get_last_insn ();
5198 
5199   /* If the function receives a non-local goto, then store the
5200      bits we need to restore the frame pointer.  */
5201   if (cfun->nonlocal_goto_save_area)
5202     {
5203       tree t_save;
5204       rtx r_save;
5205 
5206       tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5207       gcc_assert (DECL_RTL_SET_P (var));
5208 
5209       t_save = build4 (ARRAY_REF,
5210 		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5211 		       cfun->nonlocal_goto_save_area,
5212 		       integer_zero_node, NULL_TREE, NULL_TREE);
5213       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5214       gcc_assert (GET_MODE (r_save) == Pmode);
5215 
5216       emit_move_insn (r_save, hard_frame_pointer_rtx);
5217       update_nonlocal_goto_save_area ();
5218     }
5219 
5220   if (crtl->profile)
5221     {
5222 #ifdef PROFILE_HOOK
5223       PROFILE_HOOK (current_function_funcdef_no);
5224 #endif
5225     }
5226 
5227   /* If we are doing generic stack checking, the probe should go here.  */
5228   if (flag_stack_check == GENERIC_STACK_CHECK)
5229     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5230 }
5231 
5232 void
5233 pop_dummy_function (void)
5234 {
5235   pop_cfun ();
5236   in_dummy_function = false;
5237 }
5238 
5239 /* Undo the effects of init_dummy_function_start.  */
5240 void
5241 expand_dummy_function_end (void)
5242 {
5243   gcc_assert (in_dummy_function);
5244 
5245   /* End any sequences that failed to be closed due to syntax errors.  */
5246   while (in_sequence_p ())
5247     end_sequence ();
5248 
5249   /* Outside function body, can't compute type's actual size
5250      until next function's body starts.  */
5251 
5252   free_after_parsing (cfun);
5253   free_after_compilation (cfun);
5254   pop_dummy_function ();
5255 }
5256 
5257 /* Helper for diddle_return_value.  */
5258 
5259 void
5260 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5261 {
5262   if (! outgoing)
5263     return;
5264 
5265   if (REG_P (outgoing))
5266     (*doit) (outgoing, arg);
5267   else if (GET_CODE (outgoing) == PARALLEL)
5268     {
5269       int i;
5270 
5271       for (i = 0; i < XVECLEN (outgoing, 0); i++)
5272 	{
5273 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5274 
5275 	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5276 	    (*doit) (x, arg);
5277 	}
5278     }
5279 }
5280 
5281 /* Call DOIT for each hard register used as a return value from
5282    the current function.  */
5283 
5284 void
5285 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5286 {
5287   diddle_return_value_1 (doit, arg, crtl->return_rtx);
5288 }
5289 
5290 static void
5291 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5292 {
5293   emit_clobber (reg);
5294 }
5295 
5296 void
5297 clobber_return_register (void)
5298 {
5299   diddle_return_value (do_clobber_return_reg, NULL);
5300 
5301   /* In case we do use pseudo to return value, clobber it too.  */
5302   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5303     {
5304       tree decl_result = DECL_RESULT (current_function_decl);
5305       rtx decl_rtl = DECL_RTL (decl_result);
5306       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5307 	{
5308 	  do_clobber_return_reg (decl_rtl, NULL);
5309 	}
5310     }
5311 }
5312 
5313 static void
5314 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5315 {
5316   emit_use (reg);
5317 }
5318 
5319 static void
5320 use_return_register (void)
5321 {
5322   diddle_return_value (do_use_return_reg, NULL);
5323 }
5324 
5325 /* Generate RTL for the end of the current function.  */
5326 
5327 void
5328 expand_function_end (void)
5329 {
5330   /* If arg_pointer_save_area was referenced only from a nested
5331      function, we will not have initialized it yet.  Do that now.  */
5332   if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5333     get_arg_pointer_save_area ();
5334 
5335   /* If we are doing generic stack checking and this function makes calls,
5336      do a stack probe at the start of the function to ensure we have enough
5337      space for another stack frame.  */
5338   if (flag_stack_check == GENERIC_STACK_CHECK)
5339     {
5340       rtx_insn *insn, *seq;
5341 
5342       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5343 	if (CALL_P (insn))
5344 	  {
5345 	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5346 	    start_sequence ();
5347 	    if (STACK_CHECK_MOVING_SP)
5348 	      anti_adjust_stack_and_probe (max_frame_size, true);
5349 	    else
5350 	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5351 	    seq = get_insns ();
5352 	    end_sequence ();
5353 	    set_insn_locations (seq, prologue_location);
5354 	    emit_insn_before (seq, stack_check_probe_note);
5355 	    break;
5356 	  }
5357     }
5358 
5359   /* End any sequences that failed to be closed due to syntax errors.  */
5360   while (in_sequence_p ())
5361     end_sequence ();
5362 
5363   clear_pending_stack_adjust ();
5364   do_pending_stack_adjust ();
5365 
5366   /* Output a line number for the end of the function.
5367      SDB depended on this.  */
5368   set_curr_insn_location (input_location);
5369 
5370   /* Before the return label (if any), clobber the return
5371      registers so that they are not propagated live to the rest of
5372      the function.  This can only happen with functions that drop
5373      through; if there had been a return statement, there would
5374      have either been a return rtx, or a jump to the return label.
5375 
5376      We delay actual code generation until after the current_function_value_rtx
5377      is computed.  */
5378   rtx_insn *clobber_after = get_last_insn ();
5379 
5380   /* Output the label for the actual return from the function.  */
5381   emit_label (return_label);
5382 
5383   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5384     {
5385       /* Let except.c know where it should emit the call to unregister
5386 	 the function context for sjlj exceptions.  */
5387       if (flag_exceptions)
5388 	sjlj_emit_function_exit_after (get_last_insn ());
5389     }
5390 
5391   /* If this is an implementation of throw, do what's necessary to
5392      communicate between __builtin_eh_return and the epilogue.  */
5393   expand_eh_return ();
5394 
5395   /* If stack protection is enabled for this function, check the guard.  */
5396   if (crtl->stack_protect_guard
5397       && targetm.stack_protect_runtime_enabled_p ()
5398       && naked_return_label == NULL_RTX)
5399     stack_protect_epilogue ();
5400 
5401   /* If scalar return value was computed in a pseudo-reg, or was a named
5402      return value that got dumped to the stack, copy that to the hard
5403      return register.  */
5404   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5405     {
5406       tree decl_result = DECL_RESULT (current_function_decl);
5407       rtx decl_rtl = DECL_RTL (decl_result);
5408 
5409       if (REG_P (decl_rtl)
5410 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5411 	  : DECL_REGISTER (decl_result))
5412 	{
5413 	  rtx real_decl_rtl = crtl->return_rtx;
5414 	  complex_mode cmode;
5415 
5416 	  /* This should be set in assign_parms.  */
5417 	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5418 
5419 	  /* If this is a BLKmode structure being returned in registers,
5420 	     then use the mode computed in expand_return.  Note that if
5421 	     decl_rtl is memory, then its mode may have been changed,
5422 	     but that crtl->return_rtx has not.  */
5423 	  if (GET_MODE (real_decl_rtl) == BLKmode)
5424 	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5425 
5426 	  /* If a non-BLKmode return value should be padded at the least
5427 	     significant end of the register, shift it left by the appropriate
5428 	     amount.  BLKmode results are handled using the group load/store
5429 	     machinery.  */
5430 	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5431 	      && REG_P (real_decl_rtl)
5432 	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5433 	    {
5434 	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5435 					   REGNO (real_decl_rtl)),
5436 			      decl_rtl);
5437 	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5438 	    }
5439 	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
5440 	    {
5441 	      /* If expand_function_start has created a PARALLEL for decl_rtl,
5442 		 move the result to the real return registers.  Otherwise, do
5443 		 a group load from decl_rtl for a named return.  */
5444 	      if (GET_CODE (decl_rtl) == PARALLEL)
5445 		emit_group_move (real_decl_rtl, decl_rtl);
5446 	      else
5447 		emit_group_load (real_decl_rtl, decl_rtl,
5448 				 TREE_TYPE (decl_result),
5449 				 int_size_in_bytes (TREE_TYPE (decl_result)));
5450 	    }
5451 	  /* In the case of complex integer modes smaller than a word, we'll
5452 	     need to generate some non-trivial bitfield insertions.  Do that
5453 	     on a pseudo and not the hard register.  */
5454 	  else if (GET_CODE (decl_rtl) == CONCAT
5455 		   && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5456 		   && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5457 	    {
5458 	      int old_generating_concat_p;
5459 	      rtx tmp;
5460 
5461 	      old_generating_concat_p = generating_concat_p;
5462 	      generating_concat_p = 0;
5463 	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5464 	      generating_concat_p = old_generating_concat_p;
5465 
5466 	      emit_move_insn (tmp, decl_rtl);
5467 	      emit_move_insn (real_decl_rtl, tmp);
5468 	    }
5469 	  /* If a named return value dumped decl_return to memory, then
5470 	     we may need to re-do the PROMOTE_MODE signed/unsigned
5471 	     extension.  */
5472 	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5473 	    {
5474 	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5475 	      promote_function_mode (TREE_TYPE (decl_result),
5476 				     GET_MODE (decl_rtl), &unsignedp,
5477 				     TREE_TYPE (current_function_decl), 1);
5478 
5479 	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
5480 	    }
5481 	  else
5482 	    emit_move_insn (real_decl_rtl, decl_rtl);
5483 	}
5484     }
5485 
5486   /* If returning a structure, arrange to return the address of the value
5487      in a place where debuggers expect to find it.
5488 
5489      If returning a structure PCC style,
5490      the caller also depends on this value.
5491      And cfun->returns_pcc_struct is not necessarily set.  */
5492   if ((cfun->returns_struct || cfun->returns_pcc_struct)
5493       && !targetm.calls.omit_struct_return_reg)
5494     {
5495       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5496       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5497       rtx outgoing;
5498 
5499       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5500 	type = TREE_TYPE (type);
5501       else
5502 	value_address = XEXP (value_address, 0);
5503 
5504       outgoing = targetm.calls.function_value (build_pointer_type (type),
5505 					       current_function_decl, true);
5506 
5507       /* Mark this as a function return value so integrate will delete the
5508 	 assignment and USE below when inlining this function.  */
5509       REG_FUNCTION_VALUE_P (outgoing) = 1;
5510 
5511       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
5512       scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5513       value_address = convert_memory_address (mode, value_address);
5514 
5515       emit_move_insn (outgoing, value_address);
5516 
5517       /* Show return register used to hold result (in this case the address
5518 	 of the result).  */
5519       crtl->return_rtx = outgoing;
5520     }
5521 
5522   /* Emit the actual code to clobber return register.  Don't emit
5523      it if clobber_after is a barrier, then the previous basic block
5524      certainly doesn't fall thru into the exit block.  */
5525   if (!BARRIER_P (clobber_after))
5526     {
5527       start_sequence ();
5528       clobber_return_register ();
5529       rtx_insn *seq = get_insns ();
5530       end_sequence ();
5531 
5532       emit_insn_after (seq, clobber_after);
5533     }
5534 
5535   /* Output the label for the naked return from the function.  */
5536   if (naked_return_label)
5537     emit_label (naked_return_label);
5538 
5539   /* @@@ This is a kludge.  We want to ensure that instructions that
5540      may trap are not moved into the epilogue by scheduling, because
5541      we don't always emit unwind information for the epilogue.  */
5542   if (cfun->can_throw_non_call_exceptions
5543       && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5544     emit_insn (gen_blockage ());
5545 
5546   /* If stack protection is enabled for this function, check the guard.  */
5547   if (crtl->stack_protect_guard
5548       && targetm.stack_protect_runtime_enabled_p ()
5549       && naked_return_label)
5550     stack_protect_epilogue ();
5551 
5552   /* If we had calls to alloca, and this machine needs
5553      an accurate stack pointer to exit the function,
5554      insert some code to save and restore the stack pointer.  */
5555   if (! EXIT_IGNORE_STACK
5556       && cfun->calls_alloca)
5557     {
5558       rtx tem = 0;
5559 
5560       start_sequence ();
5561       emit_stack_save (SAVE_FUNCTION, &tem);
5562       rtx_insn *seq = get_insns ();
5563       end_sequence ();
5564       emit_insn_before (seq, parm_birth_insn);
5565 
5566       emit_stack_restore (SAVE_FUNCTION, tem);
5567     }
5568 
5569   /* ??? This should no longer be necessary since stupid is no longer with
5570      us, but there are some parts of the compiler (eg reload_combine, and
5571      sh mach_dep_reorg) that still try and compute their own lifetime info
5572      instead of using the general framework.  */
5573   use_return_register ();
5574 }
5575 
5576 rtx
5577 get_arg_pointer_save_area (void)
5578 {
5579   rtx ret = arg_pointer_save_area;
5580 
5581   if (! ret)
5582     {
5583       ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5584       arg_pointer_save_area = ret;
5585     }
5586 
5587   if (! crtl->arg_pointer_save_area_init)
5588     {
5589       /* Save the arg pointer at the beginning of the function.  The
5590 	 generated stack slot may not be a valid memory address, so we
5591 	 have to check it and fix it if necessary.  */
5592       start_sequence ();
5593       emit_move_insn (validize_mem (copy_rtx (ret)),
5594                       crtl->args.internal_arg_pointer);
5595       rtx_insn *seq = get_insns ();
5596       end_sequence ();
5597 
5598       push_topmost_sequence ();
5599       emit_insn_after (seq, entry_of_function ());
5600       pop_topmost_sequence ();
5601 
5602       crtl->arg_pointer_save_area_init = true;
5603     }
5604 
5605   return ret;
5606 }
5607 
5608 
5609 /* If debugging dumps are requested, dump information about how the
5610    target handled -fstack-check=clash for the prologue.
5611 
5612    PROBES describes what if any probes were emitted.
5613 
5614    RESIDUALS indicates if the prologue had any residual allocation
5615    (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */
5616 
5617 void
5618 dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5619 {
5620   if (!dump_file)
5621     return;
5622 
5623   switch (probes)
5624     {
5625     case NO_PROBE_NO_FRAME:
5626       fprintf (dump_file,
5627 	       "Stack clash no probe no stack adjustment in prologue.\n");
5628       break;
5629     case NO_PROBE_SMALL_FRAME:
5630       fprintf (dump_file,
5631 	       "Stack clash no probe small stack adjustment in prologue.\n");
5632       break;
5633     case PROBE_INLINE:
5634       fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5635       break;
5636     case PROBE_LOOP:
5637       fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5638       break;
5639     }
5640 
5641   if (residuals)
5642     fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5643   else
5644     fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5645 
5646   if (frame_pointer_needed)
5647     fprintf (dump_file, "Stack clash frame pointer needed.\n");
5648   else
5649     fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5650 
5651   if (TREE_THIS_VOLATILE (cfun->decl))
5652     fprintf (dump_file,
5653 	     "Stack clash noreturn prologue, assuming no implicit"
5654 	     " probes in caller.\n");
5655   else
5656     fprintf (dump_file,
5657 	     "Stack clash not noreturn prologue.\n");
5658 }
5659 
5660 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5661    for the first time.  */
5662 
5663 static void
5664 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5665 {
5666   rtx_insn *tmp;
5667   hash_table<insn_cache_hasher> *hash = *hashp;
5668 
5669   if (hash == NULL)
5670     *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5671 
5672   for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5673     {
5674       rtx *slot = hash->find_slot (tmp, INSERT);
5675       gcc_assert (*slot == NULL);
5676       *slot = tmp;
5677     }
5678 }
5679 
5680 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5681    basic block, splitting or peepholes.  If INSN is a prologue or epilogue
5682    insn, then record COPY as well.  */
5683 
5684 void
5685 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5686 {
5687   hash_table<insn_cache_hasher> *hash;
5688   rtx *slot;
5689 
5690   hash = epilogue_insn_hash;
5691   if (!hash || !hash->find (insn))
5692     {
5693       hash = prologue_insn_hash;
5694       if (!hash || !hash->find (insn))
5695 	return;
5696     }
5697 
5698   slot = hash->find_slot (copy, INSERT);
5699   gcc_assert (*slot == NULL);
5700   *slot = copy;
5701 }
5702 
5703 /* Determine if any INSNs in HASH are, or are part of, INSN.  Because
5704    we can be running after reorg, SEQUENCE rtl is possible.  */
5705 
5706 static bool
5707 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5708 {
5709   if (hash == NULL)
5710     return false;
5711 
5712   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5713     {
5714       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5715       int i;
5716       for (i = seq->len () - 1; i >= 0; i--)
5717 	if (hash->find (seq->element (i)))
5718 	  return true;
5719       return false;
5720     }
5721 
5722   return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5723 }
5724 
5725 int
5726 prologue_contains (const rtx_insn *insn)
5727 {
5728   return contains (insn, prologue_insn_hash);
5729 }
5730 
5731 int
5732 epilogue_contains (const rtx_insn *insn)
5733 {
5734   return contains (insn, epilogue_insn_hash);
5735 }
5736 
5737 int
5738 prologue_epilogue_contains (const rtx_insn *insn)
5739 {
5740   if (contains (insn, prologue_insn_hash))
5741     return 1;
5742   if (contains (insn, epilogue_insn_hash))
5743     return 1;
5744   return 0;
5745 }
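
/* A minimal usage sketch (not compiled in): the kind of query later RTL
   passes make with the predicates above, e.g. to avoid moving instructions
   into or out of the prologue or epilogue.  example_safe_to_move_p is a
   hypothetical helper; prologue_epilogue_contains is the real interface.  */
#if 0
static bool
example_safe_to_move_p (const rtx_insn *insn)
{
  /* Prologue and epilogue insns carry frame-setup and unwind assumptions,
     so refuse to reorder them.  */
  if (prologue_epilogue_contains (insn))
    return false;
  return true;
}
#endif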
5746 
5747 void
5748 record_prologue_seq (rtx_insn *seq)
5749 {
5750   record_insns (seq, NULL, &prologue_insn_hash);
5751 }
5752 
5753 void
5754 record_epilogue_seq (rtx_insn *seq)
5755 {
5756   record_insns (seq, NULL, &epilogue_insn_hash);
5757 }
5758 
5759 /* Set JUMP_LABEL for a return insn.  */
5760 
5761 void
5762 set_return_jump_label (rtx_insn *returnjump)
5763 {
5764   rtx pat = PATTERN (returnjump);
5765   if (GET_CODE (pat) == PARALLEL)
5766     pat = XVECEXP (pat, 0, 0);
5767   if (ANY_RETURN_P (pat))
5768     JUMP_LABEL (returnjump) = pat;
5769   else
5770     JUMP_LABEL (returnjump) = ret_rtx;
5771 }
5772 
5773 /* Return a sequence to be used as the split prologue for the current
5774    function, or NULL.  */
5775 
5776 static rtx_insn *
5777 make_split_prologue_seq (void)
5778 {
5779   if (!flag_split_stack
5780       || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5781     return NULL;
5782 
5783   start_sequence ();
5784   emit_insn (targetm.gen_split_stack_prologue ());
5785   rtx_insn *seq = get_insns ();
5786   end_sequence ();
5787 
5788   record_insns (seq, NULL, &prologue_insn_hash);
5789   set_insn_locations (seq, prologue_location);
5790 
5791   return seq;
5792 }
5793 
5794 /* Return a sequence to be used as the prologue for the current function,
5795    or NULL.  */
5796 
5797 static rtx_insn *
5798 make_prologue_seq (void)
5799 {
5800   if (!targetm.have_prologue ())
5801     return NULL;
5802 
5803   start_sequence ();
5804   rtx_insn *seq = targetm.gen_prologue ();
5805   emit_insn (seq);
5806 
5807   /* Insert an explicit USE for the frame pointer
5808      if the profiling is on and the frame pointer is required.  */
5809   if (crtl->profile && frame_pointer_needed)
5810     emit_use (hard_frame_pointer_rtx);
5811 
5812   /* Retain a map of the prologue insns.  */
5813   record_insns (seq, NULL, &prologue_insn_hash);
5814   emit_note (NOTE_INSN_PROLOGUE_END);
5815 
5816   /* Ensure that instructions are not moved into the prologue when
5817      profiling is on.  The call to the profiling routine can be
5818      emitted within the live range of a call-clobbered register.  */
5819   if (!targetm.profile_before_prologue () && crtl->profile)
5820     emit_insn (gen_blockage ());
5821 
5822   seq = get_insns ();
5823   end_sequence ();
5824   set_insn_locations (seq, prologue_location);
5825 
5826   return seq;
5827 }
5828 
5829 /* Emit a sequence of insns to zero the call-used registers before RET
5830    according to ZERO_REGS_TYPE.  */
5831 
5832 static void
5833 gen_call_used_regs_seq (rtx_insn *ret, unsigned int zero_regs_type)
5834 {
5835   bool only_gpr = true;
5836   bool only_used = true;
5837   bool only_arg = true;
5838 
5839   /* No need to zero call-used-regs in main ().  */
5840   if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
5841     return;
5842 
5843   /* No need to zero call-used-regs if __builtin_eh_return is called
5844      since it isn't a normal function return.  */
5845   if (crtl->calls_eh_return)
5846     return;
5847 
5848   /* If only_gpr is true, only zero call-used registers that are
5849      general-purpose registers; if only_used is true, only zero
5850      call-used registers that are used in the current function;
5851      if only_arg is true, only zero call-used registers that pass
5852      parameters defined by the platform's calling convention.  */
5853 
5854   using namespace zero_regs_flags;
5855 
5856   only_gpr = zero_regs_type & ONLY_GPR;
5857   only_used = zero_regs_type & ONLY_USED;
5858   only_arg = zero_regs_type & ONLY_ARG;
5859 
5860   /* For each of the hard registers, we should zero it if:
5861 	    1. it is a call-used register;
5862 	and 2. it is not a fixed register;
5863 	and 3. it is not live at the return of the routine;
5864 	and 4. it is a general register if only_gpr is true;
5865 	and 5. it is used in the routine if only_used is true;
5866 	and 6. it is a register that passes parameter if only_arg is true.  */
5867 
5868   /* First, prepare the data flow information.  */
5869   basic_block bb = BLOCK_FOR_INSN (ret);
5870   auto_bitmap live_out;
5871   bitmap_copy (live_out, df_get_live_out (bb));
5872   df_simulate_initialize_backwards (bb, live_out);
5873   df_simulate_one_insn_backwards (bb, ret, live_out);
5874 
5875   HARD_REG_SET selected_hardregs;
5876   CLEAR_HARD_REG_SET (selected_hardregs);
5877   for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5878     {
5879       if (!crtl->abi->clobbers_full_reg_p (regno))
5880 	continue;
5881       if (fixed_regs[regno])
5882 	continue;
5883       if (REGNO_REG_SET_P (live_out, regno))
5884 	continue;
5885       if (only_gpr
5886 	  && !TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], regno))
5887 	continue;
5888       if (only_used && !df_regs_ever_live_p (regno))
5889 	continue;
5890       if (only_arg && !FUNCTION_ARG_REGNO_P (regno))
5891 	continue;
5892 #ifdef LEAF_REG_REMAP
5893       if (crtl->uses_only_leaf_regs && LEAF_REG_REMAP (regno) < 0)
5894 	continue;
5895 #endif
5896 
5897       /* Now this is a register that we might want to zero.  */
5898       SET_HARD_REG_BIT (selected_hardregs, regno);
5899     }
5900 
5901   if (hard_reg_set_empty_p (selected_hardregs))
5902     return;
5903 
5904   /* Now that we have a hard register set that needs to be zeroed, pass it to
5905      the target to generate the zeroing sequence.  */
5906   HARD_REG_SET zeroed_hardregs;
5907   start_sequence ();
5908   zeroed_hardregs = targetm.calls.zero_call_used_regs (selected_hardregs);
5909   rtx_insn *seq = get_insns ();
5910   end_sequence ();
5911   if (seq)
5912     {
5913       /* Emit the memory blockage and register clobber asm volatile before
5914 	 the whole sequence.  */
5915       start_sequence ();
5916       expand_asm_reg_clobber_mem_blockage (zeroed_hardregs);
5917       rtx_insn *seq_barrier = get_insns ();
5918       end_sequence ();
5919 
5920       emit_insn_before (seq_barrier, ret);
5921       emit_insn_before (seq, ret);
5922 
5923       /* Update the data flow information.  */
5924       crtl->must_be_zero_on_return |= zeroed_hardregs;
5925       df_set_bb_dirty (EXIT_BLOCK_PTR_FOR_FN (cfun));
5926     }
5927 }
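
/* A minimal usage sketch (not compiled in): two alternative flag
   combinations a caller could pass to the routine above, shown side by side
   rather than as something done twice in practice.  The first restricts
   zeroing to general-purpose argument registers the function actually used;
   the second passes no restriction bits, so every eligible call-used
   register is considered.  RET stands for a return insn the caller already
   has; the exact mapping from -fzero-call-used-regs values to these bits is
   decided by the option-handling code, not here.  */
#if 0
static void
example_zero_call_used_regs (rtx_insn *ret)
{
  using namespace zero_regs_flags;

  /* Zero only general-purpose argument registers the function used.  */
  gen_call_used_regs_seq (ret, ONLY_GPR | ONLY_USED | ONLY_ARG);

  /* No restriction bits: consider every eligible call-used register.  */
  gen_call_used_regs_seq (ret, 0);
}
#endif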
5928 
5929 
5930 /* Return a sequence to be used as the epilogue for the current function,
5931    or NULL.  */
5932 
5933 static rtx_insn *
5934 make_epilogue_seq (void)
5935 {
5936   if (!targetm.have_epilogue ())
5937     return NULL;
5938 
5939   start_sequence ();
5940   emit_note (NOTE_INSN_EPILOGUE_BEG);
5941   rtx_insn *seq = targetm.gen_epilogue ();
5942   if (seq)
5943     emit_jump_insn (seq);
5944 
5945   /* Retain a map of the epilogue insns.  */
5946   record_insns (seq, NULL, &epilogue_insn_hash);
5947   set_insn_locations (seq, epilogue_location);
5948 
5949   seq = get_insns ();
5950   rtx_insn *returnjump = get_last_insn ();
5951   end_sequence ();
5952 
5953   if (JUMP_P (returnjump))
5954     set_return_jump_label (returnjump);
5955 
5956   return seq;
5957 }
5958 
5959 
5960 /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
5961    this into place with notes indicating where the prologue ends and where
5962    the epilogue begins.  Update the basic block information when possible.
5963 
5964    Notes on epilogue placement:
5965    There are several kinds of edges to the exit block:
5966    * a single fallthru edge from LAST_BB
5967    * possibly, edges from blocks containing sibcalls
5968    * possibly, fake edges from infinite loops
5969 
5970    The epilogue is always emitted on the fallthru edge from the last basic
5971    block in the function, LAST_BB, into the exit block.
5972 
5973    If LAST_BB is empty except for a label, it is the target of every
5974    other basic block in the function that ends in a return.  If a
5975    target has a return or simple_return pattern (possibly with
5976    conditional variants), these basic blocks can be changed so that a
5977    return insn is emitted into them, and their target is adjusted to
5978    the real exit block.
5979 
5980    Notes on shrink wrapping: We implement a fairly conservative
5981    version of shrink-wrapping rather than the textbook one.  We only
5982    generate a single prologue and a single epilogue.  This is
5983    sufficient to catch a number of interesting cases involving early
5984    exits.
5985 
5986    First, we identify the blocks that require the prologue to occur before
5987    them.  These are the ones that modify a call-saved register, or reference
5988    any of the stack or frame pointer registers.  To simplify things, we then
5989    mark everything reachable from these blocks as also requiring a prologue.
5990    This takes care of loops automatically, and avoids the need to examine
5991    whether MEMs reference the frame, since it is sufficient to check for
5992    occurrences of the stack or frame pointer.
5993 
5994    We then compute the set of blocks for which the need for a prologue
5995    is anticipatable (borrowing terminology from the shrink-wrapping
5996    description in Muchnick's book).  These are the blocks which either
5997    require a prologue themselves, or those that have only successors
5998    where the prologue is anticipatable.  The prologue needs to be
5999    inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
6000    is not.  For the moment, we ensure that only one such edge exists.
6001 
6002    The epilogue is placed as described above, but we make a
6003    distinction between inserting return and simple_return patterns
6004    when modifying other blocks that end in a return.  Blocks that end
6005    in a sibcall omit the sibcall_epilogue if the block is not in
6006    ANTIC.  */
6007 
6008 void
6009 thread_prologue_and_epilogue_insns (void)
6010 {
6011   df_analyze ();
6012 
6013   /* Can't deal with multiple successors of the entry block at the
6014      moment.  Function should always have at least one entry
6015      point.  */
6016   gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6017 
6018   edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6019   edge orig_entry_edge = entry_edge;
6020 
6021   rtx_insn *split_prologue_seq = make_split_prologue_seq ();
6022   rtx_insn *prologue_seq = make_prologue_seq ();
6023   rtx_insn *epilogue_seq = make_epilogue_seq ();
6024 
6025   /* Try to perform a kind of shrink-wrapping, making sure the
6026      prologue/epilogue is emitted only around those parts of the
6027      function that require it.  */
6028   try_shrink_wrapping (&entry_edge, prologue_seq);
6029 
6030   /* If the target can handle splitting the prologue/epilogue into separate
6031      components, try to shrink-wrap these components separately.  */
6032   try_shrink_wrapping_separate (entry_edge->dest);
6033 
6034   /* If that did anything for any component, we now need to generate the
6035      "main" prologue again.  Because some targets require some of these
6036      to be called in a specific order (i386 requires the split prologue
6037      to be first, for example), we create all three sequences again here.
6038      If this does not work for some target, that target should not enable
6039      separate shrink-wrapping.  */
6040   if (crtl->shrink_wrapped_separate)
6041     {
6042       split_prologue_seq = make_split_prologue_seq ();
6043       prologue_seq = make_prologue_seq ();
6044       epilogue_seq = make_epilogue_seq ();
6045     }
6046 
6047   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6048 
6049   /* A small fib -- epilogue is not yet completed, but we wish to re-use
6050      this marker for the splits of EH_RETURN patterns, and nothing else
6051      uses the flag in the meantime.  */
6052   epilogue_completed = 1;
6053 
6054   /* Find non-fallthru edges that end with EH_RETURN instructions.  On
6055      some targets, these get split to a special version of the epilogue
6056      code.  In order to be able to properly annotate these with unwind
6057      info, try to split them now.  If we get a valid split, drop an
6058      EPILOGUE_BEG note and mark the insns as epilogue insns.  */
6059   edge e;
6060   edge_iterator ei;
6061   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6062     {
6063       rtx_insn *prev, *last, *trial;
6064 
6065       if (e->flags & EDGE_FALLTHRU)
6066 	continue;
6067       last = BB_END (e->src);
6068       if (!eh_returnjump_p (last))
6069 	continue;
6070 
6071       prev = PREV_INSN (last);
6072       trial = try_split (PATTERN (last), last, 1);
6073       if (trial == last)
6074 	continue;
6075 
6076       record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6077       emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6078     }
6079 
6080   edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6081 
6082   if (exit_fallthru_edge)
6083     {
6084       if (epilogue_seq)
6085 	{
6086 	  insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
6087 	  commit_edge_insertions ();
6088 
6089 	  /* The epilogue insns we inserted may cause the exit edge to no longer
6090 	     be fallthru.  */
6091 	  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6092 	    {
6093 	      if (((e->flags & EDGE_FALLTHRU) != 0)
6094 		  && returnjump_p (BB_END (e->src)))
6095 		e->flags &= ~EDGE_FALLTHRU;
6096 	    }
6097 	}
6098       else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
6099 	{
6100 	  /* We have a fall-through edge to the exit block, the source is not
6101 	     at the end of the function, and there will be an assembler epilogue
6102 	     at the end of the function.
6103 	     We can't use force_nonfallthru here, because that would try to
6104 	     use return.  Inserting a jump 'by hand' is extremely messy, so
6105 	     we take advantage of cfg_layout_finalize using
6106 	     fixup_fallthru_exit_predecessor.  */
6107 	  cfg_layout_initialize (0);
6108 	  basic_block cur_bb;
6109 	  FOR_EACH_BB_FN (cur_bb, cfun)
6110 	    if (cur_bb->index >= NUM_FIXED_BLOCKS
6111 		&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6112 	      cur_bb->aux = cur_bb->next_bb;
6113 	  cfg_layout_finalize ();
6114 	}
6115     }
6116 
6117   /* Insert the prologue.  */
6118 
6119   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6120 
6121   if (split_prologue_seq || prologue_seq)
6122     {
6123       rtx_insn *split_prologue_insn = split_prologue_seq;
6124       if (split_prologue_seq)
6125 	{
6126 	  while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6127 	    split_prologue_insn = NEXT_INSN (split_prologue_insn);
6128 	  insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6129 	}
6130 
6131       rtx_insn *prologue_insn = prologue_seq;
6132       if (prologue_seq)
6133 	{
6134 	  while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6135 	    prologue_insn = NEXT_INSN (prologue_insn);
6136 	  insert_insn_on_edge (prologue_seq, entry_edge);
6137 	}
6138 
6139       commit_edge_insertions ();
6140 
6141       /* Look for basic blocks within the prologue insns.  */
6142       if (split_prologue_insn
6143 	  && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
6144 	split_prologue_insn = NULL;
6145       if (prologue_insn
6146 	  && BLOCK_FOR_INSN (prologue_insn) == NULL)
6147 	prologue_insn = NULL;
6148       if (split_prologue_insn || prologue_insn)
6149 	{
6150 	  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
6151 	  bitmap_clear (blocks);
6152 	  if (split_prologue_insn)
6153 	    bitmap_set_bit (blocks,
6154 			    BLOCK_FOR_INSN (split_prologue_insn)->index);
6155 	  if (prologue_insn)
6156 	    bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
6157 	  find_many_sub_basic_blocks (blocks);
6158 	}
6159     }
6160 
6161   default_rtl_profile ();
6162 
6163   /* Emit sibling epilogues before any sibling call sites.  */
6164   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6165        (e = ei_safe_edge (ei));
6166        ei_next (&ei))
6167     {
6168       /* Skip those already handled, the ones that run without prologue.  */
6169       if (e->flags & EDGE_IGNORE)
6170 	{
6171 	  e->flags &= ~EDGE_IGNORE;
6172 	  continue;
6173 	}
6174 
6175       rtx_insn *insn = BB_END (e->src);
6176 
6177       if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6178 	continue;
6179 
6180       if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6181 	{
6182 	  start_sequence ();
6183 	  emit_note (NOTE_INSN_EPILOGUE_BEG);
6184 	  emit_insn (ep_seq);
6185 	  rtx_insn *seq = get_insns ();
6186 	  end_sequence ();
6187 
6188 	  /* Retain a map of the epilogue insns.  Used in life analysis to
6189 	     avoid getting rid of sibcall epilogue insns.  Do this before we
6190 	     actually emit the sequence.  */
6191 	  record_insns (seq, NULL, &epilogue_insn_hash);
6192 	  set_insn_locations (seq, epilogue_location);
6193 
6194 	  emit_insn_before (seq, insn);
6195 	}
6196     }
6197 
6198   if (epilogue_seq)
6199     {
6200       rtx_insn *insn, *next;
6201 
6202       /* Similarly, move any line notes that appear after the epilogue.
6203          There is no need, however, to be quite so strict about the existence
6204 	 of such a note.  Also possibly move
6205 	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6206 	 info generation.  */
6207       for (insn = epilogue_seq; insn; insn = next)
6208 	{
6209 	  next = NEXT_INSN (insn);
6210 	  if (NOTE_P (insn)
6211 	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6212 	    reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6213 	}
6214     }
6215 
6216   /* Threading the prologue and epilogue changes the artificial refs
6217      in the entry and exit blocks.  */
6218   epilogue_completed = 1;
6219   df_update_entry_exit_and_calls ();
6220 }
6221 
6222 /* Reposition the prologue-end and epilogue-begin notes after
6223    instruction scheduling.  */
6224 
6225 void
6226 reposition_prologue_and_epilogue_notes (void)
6227 {
6228   if (!targetm.have_prologue ()
6229       && !targetm.have_epilogue ()
6230       && !targetm.have_sibcall_epilogue ())
6231     return;
6232 
6233   /* Since the hash table is created on demand, the fact that it is
6234      non-null is a signal that it is non-empty.  */
6235   if (prologue_insn_hash != NULL)
6236     {
6237       size_t len = prologue_insn_hash->elements ();
6238       rtx_insn *insn, *last = NULL, *note = NULL;
6239 
6240       /* Scan from the beginning until we reach the last prologue insn.  */
6241       /* ??? While we do have the CFG intact, there are two problems:
6242 	 (1) The prologue can contain loops (typically probing the stack),
6243 	     which means that the end of the prologue isn't in the first bb.
6244 	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
6245       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6246 	{
6247 	  if (NOTE_P (insn))
6248 	    {
6249 	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6250 		note = insn;
6251 	    }
6252 	  else if (contains (insn, prologue_insn_hash))
6253 	    {
6254 	      last = insn;
6255 	      if (--len == 0)
6256 		break;
6257 	    }
6258 	}
6259 
6260       if (last)
6261 	{
6262 	  if (note == NULL)
6263 	    {
6264 	      /* Scan forward looking for the PROLOGUE_END note.  It should
6265 		 be right at the beginning of the block, possibly with other
6266 		 insn notes that got moved there.  */
6267 	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6268 		{
6269 		  if (NOTE_P (note)
6270 		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6271 		    break;
6272 		}
6273 	    }
6274 
6275 	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
6276 	  if (LABEL_P (last))
6277 	    last = NEXT_INSN (last);
6278 	  reorder_insns (note, note, last);
6279 	}
6280     }
6281 
6282   if (epilogue_insn_hash != NULL)
6283     {
6284       edge_iterator ei;
6285       edge e;
6286 
6287       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6288 	{
6289 	  rtx_insn *insn, *first = NULL, *note = NULL;
6290 	  basic_block bb = e->src;
6291 
6292 	  /* Scan from the beginning until we reach the first epilogue insn.  */
6293 	  FOR_BB_INSNS (bb, insn)
6294 	    {
6295 	      if (NOTE_P (insn))
6296 		{
6297 		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6298 		    {
6299 		      note = insn;
6300 		      if (first != NULL)
6301 			break;
6302 		    }
6303 		}
6304 	      else if (first == NULL && contains (insn, epilogue_insn_hash))
6305 		{
6306 		  first = insn;
6307 		  if (note != NULL)
6308 		    break;
6309 		}
6310 	    }
6311 
6312 	  if (note)
6313 	    {
6314 	      /* If the function has a single basic block, and no real
6315 		 epilogue insns (e.g. sibcall with no cleanup), the
6316 		 epilogue note can get scheduled before the prologue
6317 		 note.  If we have frame related prologue insns, having
6318 		 them scanned during the epilogue will result in a crash.
6319 		 In this case re-order the epilogue note to just before
6320 		 the last insn in the block.  */
6321 	      if (first == NULL)
6322 		first = BB_END (bb);
6323 
6324 	      if (PREV_INSN (first) != note)
6325 		reorder_insns (note, note, PREV_INSN (first));
6326 	    }
6327 	}
6328     }
6329 }
6330 
6331 /* Returns the name of the function declared by FNDECL.  */
6332 const char *
6333 fndecl_name (tree fndecl)
6334 {
6335   if (fndecl == NULL)
6336     return "(nofn)";
6337   return lang_hooks.decl_printable_name (fndecl, 1);
6338 }
6339 
6340 /* Returns the name of function FN.  */
6341 const char *
6342 function_name (struct function *fn)
6343 {
6344   tree fndecl = (fn == NULL) ? NULL : fn->decl;
6345   return fndecl_name (fndecl);
6346 }
6347 
6348 /* Returns the name of the current function.  */
6349 const char *
6350 current_function_name (void)
6351 {
6352   return function_name (cfun);
6353 }
6354 
6355 
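/* Record in crtl->uses_only_leaf_regs whether the current function uses
   only "leaf" registers.  This only matters on targets that define
   LEAF_REGISTERS (SPARC, for example, where such a function can run in
   its caller's register window); elsewhere it is a no-op.  */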
6356 static unsigned int
6357 rest_of_handle_check_leaf_regs (void)
6358 {
6359 #ifdef LEAF_REGISTERS
6360   crtl->uses_only_leaf_regs
6361     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6362 #endif
6363   return 0;
6364 }
6365 
6366 /* Insert a TYPE into the used types hash table of CFUN.  */
6367 
6368 static void
6369 used_types_insert_helper (tree type, struct function *func)
6370 {
6371   if (type != NULL && func != NULL)
6372     {
6373       if (func->used_types_hash == NULL)
6374 	func->used_types_hash = hash_set<tree>::create_ggc (37);
6375 
6376       func->used_types_hash->add (type);
6377     }
6378 }
6379 
6380 /* Given a type, insert it into the used types hash table of cfun.  */
6381 void
6382 used_types_insert (tree t)
6383 {
6384   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6385     if (TYPE_NAME (t))
6386       break;
6387     else
6388       t = TREE_TYPE (t);
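  /* E.g. for a variable of type "struct foo *[4]" the loop above peels
     off the unnamed array and pointer layers so that "struct foo" itself
     is what gets recorded.  */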
6389   if (TREE_CODE (t) == ERROR_MARK)
6390     return;
6391   if (TYPE_NAME (t) == NULL_TREE
6392       || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6393     t = TYPE_MAIN_VARIANT (t);
6394   if (debug_info_level > DINFO_LEVEL_NONE)
6395     {
6396       if (cfun)
6397 	used_types_insert_helper (t, cfun);
6398       else
6399 	{
6400 	  /* So this might be a type referenced by a global variable.
6401 	     Record that type so that we can later decide to emit its
6402 	     debug information.  */
6403 	  vec_safe_push (types_used_by_cur_var_decl, t);
6404 	}
6405     }
6406 }
6407 
6408 /* Helper to hash a struct types_used_by_vars_entry.  */
6409 
6410 static hashval_t
6411 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6412 {
6413   gcc_assert (entry && entry->var_decl && entry->type);
6414 
6415   return iterative_hash_object (entry->type,
6416 				iterative_hash_object (entry->var_decl, 0));
6417 }
6418 
6419 /* Hash function of the types_used_by_vars_entry hash table.  */
6420 
6421 hashval_t
6422 used_type_hasher::hash (types_used_by_vars_entry *entry)
6423 {
6424   return hash_types_used_by_vars_entry (entry);
6425 }
6426 
6427 /* Equality function of the types_used_by_vars_entry hash table.  */
6428 
6429 bool
6430 used_type_hasher::equal (types_used_by_vars_entry *e1,
6431 			 types_used_by_vars_entry *e2)
6432 {
6433   return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6434 }
6435 
6436 /* Inserts an entry into the types_used_by_vars_hash hash table.  */
6437 
6438 void
6439 types_used_by_var_decl_insert (tree type, tree var_decl)
6440 {
6441   if (type != NULL && var_decl != NULL)
6442     {
6443       types_used_by_vars_entry **slot;
6444       struct types_used_by_vars_entry e;
6445       e.var_decl = var_decl;
6446       e.type = type;
6447       if (types_used_by_vars_hash == NULL)
6448 	types_used_by_vars_hash
6449 	  = hash_table<used_type_hasher>::create_ggc (37);
6450 
6451       slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6452       if (*slot == NULL)
6453 	{
6454 	  struct types_used_by_vars_entry *entry;
6455 	  entry = ggc_alloc<types_used_by_vars_entry> ();
6456 	  entry->type = type;
6457 	  entry->var_decl = var_decl;
6458 	  *slot = entry;
6459 	}
6460     }
6461 }
6462 
6463 namespace {
6464 
6465 const pass_data pass_data_leaf_regs =
6466 {
6467   RTL_PASS, /* type */
6468   "*leaf_regs", /* name */
6469   OPTGROUP_NONE, /* optinfo_flags */
6470   TV_NONE, /* tv_id */
6471   0, /* properties_required */
6472   0, /* properties_provided */
6473   0, /* properties_destroyed */
6474   0, /* todo_flags_start */
6475   0, /* todo_flags_finish */
6476 };
6477 
6478 class pass_leaf_regs : public rtl_opt_pass
6479 {
6480 public:
6481   pass_leaf_regs (gcc::context *ctxt)
6482     : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6483   {}
6484 
6485   /* opt_pass methods: */
6486   virtual unsigned int execute (function *)
6487     {
6488       return rest_of_handle_check_leaf_regs ();
6489     }
6490 
6491 }; // class pass_leaf_regs
6492 
6493 } // anon namespace
6494 
6495 rtl_opt_pass *
6496 make_pass_leaf_regs (gcc::context *ctxt)
6497 {
6498   return new pass_leaf_regs (ctxt);
6499 }
6500 
6501 static unsigned int
6502 rest_of_handle_thread_prologue_and_epilogue (void)
6503 {
6504   /* prepare_shrink_wrap is sensitive to the block structure of the control
6505      flow graph, so clean it up first.  */
6506   if (optimize)
6507     cleanup_cfg (0);
6508 
6509   /* On some machines, the prologue and epilogue code, or parts thereof,
6510      can be represented as RTL.  Doing so lets us schedule insns between
6511      it and the rest of the code and also allows delayed branch
6512      scheduling to operate in the epilogue.  */
6513   thread_prologue_and_epilogue_insns ();
6514 
6515   /* Some non-cold blocks may now be only reachable from cold blocks.
6516      Fix that up.  */
6517   fixup_partitions ();
6518 
6519   /* Shrink-wrapping can result in unreachable edges in the epilogue,
6520      see PR57320.  */
6521   cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6522 
6523   /* The stack usage info is finalized during prologue expansion.  */
6524   if (flag_stack_usage_info || flag_callgraph_info)
6525     output_stack_usage ();
6526 
6527   return 0;
6528 }
6529 
6530 /* Record a final call to CALLEE at LOCATION.  */
6531 
6532 void
6533 record_final_call (tree callee, location_t location)
6534 {
6535   struct callinfo_callee datum = { location, callee };
6536   vec_safe_push (cfun->su->callees, datum);
6537 }
6538 
6539 /* Record a dynamic allocation made for DECL_OR_EXP.  */
6540 
6541 void
6542 record_dynamic_alloc (tree decl_or_exp)
6543 {
6544   struct callinfo_dalloc datum;
6545 
6546   if (DECL_P (decl_or_exp))
6547     {
6548       datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
6549       const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
6550       const char *dot = strrchr (name, '.');
6551       if (dot)
6552 	name = dot + 1;
6553       datum.name = ggc_strdup (name);
6554     }
6555   else
6556     {
6557       datum.location = EXPR_LOCATION (decl_or_exp);
6558       datum.name = NULL;
6559     }
6560 
6561   vec_safe_push (cfun->su->dallocs, datum);
6562 }
6563 
6564 namespace {
6565 
6566 const pass_data pass_data_thread_prologue_and_epilogue =
6567 {
6568   RTL_PASS, /* type */
6569   "pro_and_epilogue", /* name */
6570   OPTGROUP_NONE, /* optinfo_flags */
6571   TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6572   0, /* properties_required */
6573   0, /* properties_provided */
6574   0, /* properties_destroyed */
6575   0, /* todo_flags_start */
6576   ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6577 };
6578 
6579 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6580 {
6581 public:
6582   pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6583     : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6584   {}
6585 
6586   /* opt_pass methods: */
6587   virtual unsigned int execute (function *)
6588     {
6589       return rest_of_handle_thread_prologue_and_epilogue ();
6590     }
6591 
6592 }; // class pass_thread_prologue_and_epilogue
6593 
6594 } // anon namespace
6595 
6596 rtl_opt_pass *
6597 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6598 {
6599   return new pass_thread_prologue_and_epilogue (ctxt);
6600 }
6601 
6602 namespace {
6603 
6604 const pass_data pass_data_zero_call_used_regs =
6605 {
6606   RTL_PASS, /* type */
6607   "zero_call_used_regs", /* name */
6608   OPTGROUP_NONE, /* optinfo_flags */
6609   TV_NONE, /* tv_id */
6610   0, /* properties_required */
6611   0, /* properties_provided */
6612   0, /* properties_destroyed */
6613   0, /* todo_flags_start */
6614   0, /* todo_flags_finish */
6615 };
6616 
6617 class pass_zero_call_used_regs: public rtl_opt_pass
6618 {
6619 public:
6620   pass_zero_call_used_regs (gcc::context *ctxt)
6621     : rtl_opt_pass (pass_data_zero_call_used_regs, ctxt)
6622   {}
6623 
6624   /* opt_pass methods: */
6625   virtual unsigned int execute (function *);
6626 
6627 }; // class pass_zero_call_used_regs
6628 
6629 unsigned int
6630 pass_zero_call_used_regs::execute (function *fun)
6631 {
6632   using namespace zero_regs_flags;
6633   unsigned int zero_regs_type = UNSET;
6634 
6635   tree attr_zero_regs = lookup_attribute ("zero_call_used_regs",
6636 					  DECL_ATTRIBUTES (fun->decl));
6637 
6638   /* Get the type of zero_call_used_regs from function attribute.
6639      We have filtered out invalid attribute values already at this point.  */
6640   if (attr_zero_regs)
6641     {
6642       /* The TREE_VALUE of an attribute is a TREE_LIST whose TREE_VALUE
6643 	 is the attribute argument's value.  */
6644       attr_zero_regs = TREE_VALUE (attr_zero_regs);
6645       gcc_assert (TREE_CODE (attr_zero_regs) == TREE_LIST);
6646       attr_zero_regs = TREE_VALUE (attr_zero_regs);
6647       gcc_assert (TREE_CODE (attr_zero_regs) == STRING_CST);
6648 
6649       for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL; ++i)
6650 	if (strcmp (TREE_STRING_POINTER (attr_zero_regs),
6651 		     zero_call_used_regs_opts[i].name) == 0)
6652 	  {
6653 	    zero_regs_type = zero_call_used_regs_opts[i].flag;
6654  	    break;
6655 	  }
6656     }
6657 
6658   if (!zero_regs_type)
6659     zero_regs_type = flag_zero_call_used_regs;
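  /* Illustration: __attribute__ ((zero_call_used_regs ("used-gpr"))) on
     FUN->decl selects the "used-gpr" entry of zero_call_used_regs_opts
     above, while a plain -fzero-call-used-regs=used on the command line
     arrives here through flag_zero_call_used_regs.  */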
6660 
6661   /* No need to zero call-used-regs when no user request is present.  */
6662   if (!(zero_regs_type & ENABLED))
6663     return 0;
6664 
6665   edge_iterator ei;
6666   edge e;
6667 
6668   /* This pass needs data flow information.  */
6669   df_analyze ();
6670 
6671   /* Iterate over the function's return instructions and insert any
6672      register zeroing required by the -fzero-call-used-regs command-line
6673      option or the "zero_call_used_regs" function attribute.  */
6674   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6675     {
6676       rtx_insn *insn = BB_END (e->src);
6677       if (JUMP_P (insn) && ANY_RETURN_P (JUMP_LABEL (insn)))
6678 	gen_call_used_regs_seq (insn, zero_regs_type);
6679     }
6680 
6681   return 0;
6682 }
6683 
6684 } // anon namespace
6685 
6686 rtl_opt_pass *
6687 make_pass_zero_call_used_regs (gcc::context *ctxt)
6688 {
6689   return new pass_zero_call_used_regs (ctxt);
6690 }
6691 
6692 /* If CONSTRAINT is a matching constraint, then return its number.
6693    Otherwise, return -1.  */
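/* E.g. the constraints "0" and "%1" yield 0 and 1 respectively, while
   non-matching constraints such as "r" or "m" yield -1.  */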
6694 
6695 static int
6696 matching_constraint_num (const char *constraint)
6697 {
6698   if (*constraint == '%')
6699     constraint++;
6700 
6701   if (IN_RANGE (*constraint, '0', '9'))
6702     return strtoul (constraint, NULL, 10);
6703 
6704   return -1;
6705 }
6706 
6707 /* This mini-pass fixes fall-out from SSA in asm statements that have
6708    in-out constraints.  Say you start with
6709 
6710      orig = inout;
6711      asm ("": "+mr" (inout));
6712      use (orig);
6713 
6714    which is transformed very early to use explicit output and match operands:
6715 
6716      orig = inout;
6717      asm ("": "=mr" (inout) : "0" (inout));
6718      use (orig);
6719 
6720    Or, after SSA and copyprop,
6721 
6722      asm ("": "=mr" (inout_2) : "0" (inout_1));
6723      use (inout_1);
6724 
6725    Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6726    they represent two separate values, so they will get different pseudo
6727    registers during expansion.  Then, since the two operands need to match
6728    per the constraints, but use different pseudo registers, reload can
6729    only register a reload for these operands.  But reloads can only be
6730    satisfied by hardregs, not by memory, so we need a register for this
6731    reload, just because we are presented with non-matching operands.
6732    So, even though we allow memory for this operand, no memory can be
6733    used for it, just because the two operands don't match.  This can
6734    cause reload failures on register-starved targets.
6735 
6736    So it's a symptom of reload not being able to use memory for reloads
6737    or, alternatively it's also a symptom of both operands not coming into
6738    reload as matching (in which case the pseudo could go to memory just
6739    fine, as the alternative allows it, and no reload would be necessary).
6740    We fix the latter problem here, by transforming
6741 
6742      asm ("": "=mr" (inout_2) : "0" (inout_1));
6743 
6744    back to
6745 
6746      inout_2 = inout_1;
6747      asm ("": "=mr" (inout_2) : "0" (inout_2));  */
6748 
6749 static void
6750 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6751 {
6752   int i;
6753   bool changed = false;
6754   rtx op = SET_SRC (p_sets[0]);
6755   int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6756   rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6757   bool *output_matched = XALLOCAVEC (bool, noutputs);
6758 
6759   memset (output_matched, 0, noutputs * sizeof (bool));
6760   for (i = 0; i < ninputs; i++)
6761     {
6762       rtx input, output;
6763       rtx_insn *insns;
6764       const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6765       int match, j;
6766 
6767       match = matching_constraint_num (constraint);
6768       if (match < 0)
6769 	continue;
6770 
6771       gcc_assert (match < noutputs);
6772       output = SET_DEST (p_sets[match]);
6773       input = RTVEC_ELT (inputs, i);
6774       /* Only do the transformation for pseudos.  */
6775       if (! REG_P (output)
6776 	  || rtx_equal_p (output, input)
6777 	  || !(REG_P (input) || SUBREG_P (input)
6778 	       || MEM_P (input) || CONSTANT_P (input))
6779 	  || !general_operand (input, GET_MODE (output)))
6780 	continue;
6781 
6782       /* We can't do anything if the output is also used as input,
6783 	 as we're going to overwrite it.  */
6784       for (j = 0; j < ninputs; j++)
6785 	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6786 	  break;
6787       if (j != ninputs)
6788 	continue;
6789 
6790       /* Avoid changing the same input several times.  For
6791 	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6792 	 only change it once (to out1), rather than changing it
6793 	 first to out1 and afterwards to out2.  */
6794       if (i > 0)
6795 	{
6796 	  for (j = 0; j < noutputs; j++)
6797 	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
6798 	      break;
6799 	  if (j != noutputs)
6800 	    continue;
6801 	}
6802       output_matched[match] = true;
6803 
6804       start_sequence ();
6805       emit_move_insn (output, copy_rtx (input));
6806       insns = get_insns ();
6807       end_sequence ();
6808       emit_insn_before (insns, insn);
6809 
6810       constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
6811       bool early_clobber_p = strchr (constraint, '&') != NULL;
6812 
6813       /* Now replace all mentions of the input with output.  We can't
6814 	 just replace the occurrence in inputs[i], as the register might
6815 	 also be used in some other input (or even in an address of an
6816 	 output), which would mean possibly increasing the number of
6817 	 inputs by one (namely 'output' in addition), which might pose
6818 	 too complicated a problem for reload to solve.  E.g. this situation:
6819 
6820 	   asm ("" : "=r" (output), "=m" (input) : "0" (input))
6821 
6822 	 Here 'input' is used in two occurrences as input (once for the
6823 	 input operand, once for the address in the second output operand).
6824 	 If we were to replace only the occurrence of the input operand (to
6825 	 make the constraints match) we would be left with this:
6826 
6827 	   output = input
6828 	   asm ("" : "=r" (output), "=m" (input) : "0" (output))
6829 
6830 	 Now we suddenly have two different input values (containing the same
6831 	 value, but different pseudos) where we formerly had only one.
6832 	 With more complicated asms this might lead to reload failures
6833 	 which wouldn't have happened without this pass.  So, iterate over
6834 	 all operands and replace all occurrences of the register used.
6835 
6836 	 However, if one or more of the 'input' uses have a non-matching
6837 	 constraint and the matched output operand is an early clobber
6838 	 operand, then do not replace the input operand, since by definition
6839 	 it conflicts with the output operand and cannot share the same
6840 	 register.  See PR89313 for details.  */
6841 
6842       for (j = 0; j < noutputs; j++)
6843 	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6844 	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6845 	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6846 					      input, output);
6847       for (j = 0; j < ninputs; j++)
6848 	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6849 	  {
6850 	    if (!early_clobber_p
6851 		|| match == matching_constraint_num
6852 			      (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
6853 	      RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6854 						   input, output);
6855 	  }
6856 
6857       changed = true;
6858     }
6859 
6860   if (changed)
6861     df_insn_rescan (insn);
6862 }
6863 
6864 /* Add the decl D to the local_decls list of FUN.  */
6865 
6866 void
6867 add_local_decl (struct function *fun, tree d)
6868 {
6869   gcc_assert (VAR_P (d));
6870   vec_safe_push (fun->local_decls, d);
6871 }
6872 
6873 namespace {
6874 
6875 const pass_data pass_data_match_asm_constraints =
6876 {
6877   RTL_PASS, /* type */
6878   "asmcons", /* name */
6879   OPTGROUP_NONE, /* optinfo_flags */
6880   TV_NONE, /* tv_id */
6881   0, /* properties_required */
6882   0, /* properties_provided */
6883   0, /* properties_destroyed */
6884   0, /* todo_flags_start */
6885   0, /* todo_flags_finish */
6886 };
6887 
6888 class pass_match_asm_constraints : public rtl_opt_pass
6889 {
6890 public:
6891   pass_match_asm_constraints (gcc::context *ctxt)
6892     : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6893   {}
6894 
6895   /* opt_pass methods: */
6896   virtual unsigned int execute (function *);
6897 
6898 }; // class pass_match_asm_constraints
6899 
6900 unsigned
6901 pass_match_asm_constraints::execute (function *fun)
6902 {
6903   basic_block bb;
6904   rtx_insn *insn;
6905   rtx pat, *p_sets;
6906   int noutputs;
6907 
6908   if (!crtl->has_asm_statement)
6909     return 0;
6910 
6911   df_set_flags (DF_DEFER_INSN_RESCAN);
6912   FOR_EACH_BB_FN (bb, fun)
6913     {
6914       FOR_BB_INSNS (bb, insn)
6915 	{
6916 	  if (!INSN_P (insn))
6917 	    continue;
6918 
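	  /* An asm with multiple outputs (or any clobbers) is a PARALLEL
	     whose leading elements are the output SETs; a single-output asm
	     without clobbers is a plain SET.  p_sets points at the first
	     element; in the PARALLEL case noutputs is only an upper bound on
	     the number of output SETs, since clobbers are counted too.  */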
6919 	  pat = PATTERN (insn);
6920 	  if (GET_CODE (pat) == PARALLEL)
6921 	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6922 	  else if (GET_CODE (pat) == SET)
6923 	    p_sets = &PATTERN (insn), noutputs = 1;
6924 	  else
6925 	    continue;
6926 
6927 	  if (GET_CODE (*p_sets) == SET
6928 	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6929 	    match_asm_constraints_1 (insn, p_sets, noutputs);
6930 	}
6931     }
6932 
6933   return TODO_df_finish;
6934 }
6935 
6936 } // anon namespace
6937 
6938 rtl_opt_pass *
6939 make_pass_match_asm_constraints (gcc::context *ctxt)
6940 {
6941   return new pass_match_asm_constraints (ctxt);
6942 }
6943 
6944 
6945 #include "gt-function.h"
6946