1 /* Expands front end tree to back end RTL for GCC.
2    Copyright (C) 1987-2019 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file handles the generation of rtl code from tree structure
21    at the level of the function as a whole.
22    It creates the rtl expressions for parameters and auto variables
23    and has full responsibility for allocating stack slots.
24 
25    `expand_function_start' is called at the beginning of a function,
26    before the function body is parsed, and `expand_function_end' is
27    called after parsing the body.
28 
29    Call `assign_stack_local' to allocate a stack slot for a local variable.
30    This is usually done during the RTL generation for the function body,
31    but it can also be done in the reload pass when a pseudo-register does
32    not get a hard register.  */
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "memmodel.h"
45 #include "tm_p.h"
46 #include "stringpool.h"
47 #include "expmed.h"
48 #include "optabs.h"
49 #include "regs.h"
50 #include "emit-rtl.h"
51 #include "recog.h"
52 #include "rtl-error.h"
53 #include "alias.h"
54 #include "fold-const.h"
55 #include "stor-layout.h"
56 #include "varasm.h"
57 #include "except.h"
58 #include "dojump.h"
59 #include "explow.h"
60 #include "calls.h"
61 #include "expr.h"
62 #include "optabs-tree.h"
63 #include "output.h"
64 #include "langhooks.h"
65 #include "common/common-target.h"
66 #include "gimplify.h"
67 #include "tree-pass.h"
68 #include "cfgrtl.h"
69 #include "cfganal.h"
70 #include "cfgbuild.h"
71 #include "cfgcleanup.h"
72 #include "cfgexpand.h"
73 #include "shrink-wrap.h"
74 #include "toplev.h"
75 #include "rtl-iter.h"
76 #include "tree-dfa.h"
77 #include "tree-ssa.h"
78 #include "stringpool.h"
79 #include "attribs.h"
80 #include "gimple.h"
81 #include "options.h"
82 
83 /* So we can assign to cfun in this file.  */
84 #undef cfun
85 
86 #ifndef STACK_ALIGNMENT_NEEDED
87 #define STACK_ALIGNMENT_NEEDED 1
88 #endif
89 
90 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
91 
92 /* Round a value down to the nearest multiple of the required alignment
93    that does not exceed it.  Avoid using division in case the value is
94    negative.  Assume the alignment is a power of two.  */
95 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
96 
97 /* Similar, but round up to the next multiple of the required
98    alignment.  */
99 #define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
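
/* For example, with ALIGN == 8:
     FLOOR_ROUND (37, 8) == 32,   CEIL_ROUND (37, 8) == 40,
     FLOOR_ROUND (-37, 8) == -40, CEIL_ROUND (-37, 8) == -32.
   The masking form gives the right answer for negative values as well,
   which is why division is avoided above.  */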
100 
101 /* Nonzero once virtual register instantiation has been done.
102    assign_stack_local uses frame_pointer_rtx when this is nonzero.
103    calls.c:emit_library_call_value_1 uses it to set up
104    post-instantiation libcalls.  */
105 int virtuals_instantiated;
106 
107 /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
108 static GTY(()) int funcdef_no;
109 
110 /* This variable holds a pointer to a function that creates
111    target-specific, per-function data structures.  */
112 struct machine_function * (*init_machine_status) (void);
113 
114 /* The currently compiled function.  */
115 struct function *cfun = 0;
116 
117 /* These hashes record the prologue and epilogue insns.  */
118 
119 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
120 {
121   static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
122   static bool equal (rtx a, rtx b) { return a == b; }
123 };
124 
125 static GTY((cache))
126   hash_table<insn_cache_hasher> *prologue_insn_hash;
127 static GTY((cache))
128   hash_table<insn_cache_hasher> *epilogue_insn_hash;
129 
130 
131 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
132 vec<tree, va_gc> *types_used_by_cur_var_decl;
133 
134 /* Forward declarations.  */
135 
136 static struct temp_slot *find_temp_slot_from_address (rtx);
137 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
138 static void pad_below (struct args_size *, machine_mode, tree);
139 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
140 static int all_blocks (tree, tree *);
141 static tree *get_block_vector (tree, int *);
142 extern tree debug_find_var_in_block_tree (tree, tree);
143 /* We always define `record_insns' even if it's not used so that we
144    can always export `prologue_epilogue_contains'.  */
145 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
146      ATTRIBUTE_UNUSED;
147 static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
148 static void prepare_function_start (void);
149 static void do_clobber_return_reg (rtx, void *);
150 static void do_use_return_reg (rtx, void *);
151 
152 
153 /* Stack of nested functions: keeps track of the cfun stack while
154    nested functions are being compiled.  */
155 
156 static vec<function *> function_context_stack;
157 
158 /* Save the current context for compilation of a nested function.
159    This is called from language-specific code.  */
160 
161 void
162 push_function_context (void)
163 {
164   if (cfun == 0)
165     allocate_struct_function (NULL, false);
166 
167   function_context_stack.safe_push (cfun);
168   set_cfun (NULL);
169 }
170 
171 /* Restore the last saved context, at the end of a nested function.
172    This function is called from language-specific code.  */
173 
174 void
175 pop_function_context (void)
176 {
177   struct function *p = function_context_stack.pop ();
178   set_cfun (p);
179   current_function_decl = p->decl;
180 
181   /* Reset variables that have known state during rtx generation.  */
182   virtuals_instantiated = 0;
183   generating_concat_p = 1;
184 }
185 
186 /* Clear out all parts of the state in F that can safely be discarded
187    after the function has been parsed, but not compiled, to let
188    garbage collection reclaim the memory.  */
189 
190 void
191 free_after_parsing (struct function *f)
192 {
193   f->language = 0;
194 }
195 
196 /* Clear out all parts of the state in F that can safely be discarded
197    after the function has been compiled, to let garbage collection
198    reclaim the memory.  */
199 
200 void
201 free_after_compilation (struct function *f)
202 {
203   prologue_insn_hash = NULL;
204   epilogue_insn_hash = NULL;
205 
206   free (crtl->emit.regno_pointer_align);
207 
208   memset (crtl, 0, sizeof (struct rtl_data));
209   f->eh = NULL;
210   f->machine = NULL;
211   f->cfg = NULL;
212   f->curr_properties &= ~PROP_cfg;
213 
214   regno_reg_rtx = NULL;
215 }
216 
217 /* Return size needed for stack frame based on slots so far allocated.
218    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
219    the caller may have to do that.  */
220 
221 poly_int64
222 get_frame_size (void)
223 {
224   if (FRAME_GROWS_DOWNWARD)
225     return -frame_offset;
226   else
227     return frame_offset;
228 }
229 
230 /* Issue an error message and return TRUE if frame OFFSET overflows in
231    the signed target pointer arithmetic for function FUNC.  Otherwise
232    return FALSE.  */
233 
234 bool
235 frame_offset_overflow (poly_int64 offset, tree func)
236 {
237   poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
238   unsigned HOST_WIDE_INT limit
239     = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
240        /* Leave room for the fixed part of the frame.  */
241        - 64 * UNITS_PER_WORD);
242 
243   if (!coeffs_in_range_p (size, 0U, limit))
244     {
245       unsigned HOST_WIDE_INT hwisize;
246       if (size.is_constant (&hwisize))
247 	error_at (DECL_SOURCE_LOCATION (func),
248 		  "total size of local objects %wu exceeds maximum %wu",
249 		  hwisize, limit);
250       else
251 	error_at (DECL_SOURCE_LOCATION (func),
252 		  "total size of local objects exceeds maximum %wu",
253 		  limit);
254       return true;
255     }
256 
257   return false;
258 }
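
/* As a rough illustration of the limit above: on a 32-bit target with
   4-byte words it works out to 2^31 - 256 bytes, so a function whose
   local objects need on the order of 2 GiB of frame space gets the
   error above instead of a silently wrapped frame offset.  */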
259 
260 /* Return the minimum spill slot alignment for a register of mode MODE.  */
261 
262 unsigned int
263 spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
264 {
265   return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
266 }
267 
268 /* Return stack slot alignment in bits for TYPE and MODE.  */
269 
270 static unsigned int
271 get_stack_local_alignment (tree type, machine_mode mode)
272 {
273   unsigned int alignment;
274 
275   if (mode == BLKmode)
276     alignment = BIGGEST_ALIGNMENT;
277   else
278     alignment = GET_MODE_ALIGNMENT (mode);
279 
280   /* Allow the front end to (possibly) increase the alignment of this
281      stack slot.  */
282   if (! type)
283     type = lang_hooks.types.type_for_mode (mode, 0);
284 
285   return STACK_SLOT_ALIGNMENT (type, mode, alignment);
286 }
287 
288 /* Determine whether it is possible to fit a stack slot of size SIZE and
289    alignment ALIGNMENT into an area in the stack frame that starts at
290    frame offset START and has a length of LENGTH.  If so, store the frame
291    offset to be used for the stack slot in *POFFSET and return true;
292    return false otherwise.  This function will extend the frame size when
293    given a start/length pair that lies at the end of the frame.  */
294 
295 static bool
296 try_fit_stack_local (poly_int64 start, poly_int64 length,
297 		     poly_int64 size, unsigned int alignment,
298 		     poly_int64_pod *poffset)
299 {
300   poly_int64 this_frame_offset;
301   int frame_off, frame_alignment, frame_phase;
302 
303   /* Calculate how many bytes the start of local variables is off from
304      stack alignment.  */
305   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
306   frame_off = targetm.starting_frame_offset () % frame_alignment;
307   frame_phase = frame_off ? frame_alignment - frame_off : 0;
308 
309   /* Round the frame offset to the specified alignment.  */
310 
311   if (FRAME_GROWS_DOWNWARD)
312     this_frame_offset
313       = (aligned_lower_bound (start + length - size - frame_phase, alignment)
314 	 + frame_phase);
315   else
316     this_frame_offset
317       = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
318 
319   /* See if it fits.  If this space is at the edge of the frame,
320      consider extending the frame to make it fit.  Our caller relies on
321      this when allocating a new slot.  */
322   if (maybe_lt (this_frame_offset, start))
323     {
324       if (known_eq (frame_offset, start))
325 	frame_offset = this_frame_offset;
326       else
327 	return false;
328     }
329   else if (maybe_gt (this_frame_offset + size, start + length))
330     {
331       if (known_eq (frame_offset, start + length))
332 	frame_offset = this_frame_offset + size;
333       else
334 	return false;
335     }
336 
337   *poffset = this_frame_offset;
338   return true;
339 }
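
/* A worked example (illustrative only): with FRAME_GROWS_DOWNWARD, a
   zero frame_phase, START == -32, LENGTH == 16, SIZE == 8 and
   ALIGNMENT == 8, the candidate offset is
   aligned_lower_bound (-32 + 16 - 8, 8) == -24.  That lies within the
   free area [-32, -16), so *POFFSET becomes -24 and frame_offset is
   left untouched.  */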
340 
341 /* Create a new frame_space structure describing free space in the stack
342    frame beginning at START and ending at END, and chain it into the
343    function's frame_space_list.  */
344 
345 static void
346 add_frame_space (poly_int64 start, poly_int64 end)
347 {
348   struct frame_space *space = ggc_alloc<frame_space> ();
349   space->next = crtl->frame_space_list;
350   crtl->frame_space_list = space;
351   space->start = start;
352   space->length = end - start;
353 }
354 
355 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
356    with machine mode MODE.
357 
358    ALIGN controls the amount of alignment for the address of the slot:
359    0 means according to MODE,
360    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
361    -2 means use BITS_PER_UNIT,
362    positive specifies alignment boundary in bits.
363 
364    KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
365    alignment and ASLK_RECORD_PAD bit set if we should remember
366    extra space we allocated for alignment purposes.  When we are
367    called from assign_stack_temp_for_type, it is not set so we don't
368    track the same stack slot in two independent lists.
369 
370    We do not round to stack_boundary here.  */
371 
372 rtx
373 assign_stack_local_1 (machine_mode mode, poly_int64 size,
374 		      int align, int kind)
375 {
376   rtx x, addr;
377   poly_int64 bigend_correction = 0;
378   poly_int64 slot_offset = 0, old_frame_offset;
379   unsigned int alignment, alignment_in_bits;
380 
381   if (align == 0)
382     {
383       alignment = get_stack_local_alignment (NULL, mode);
384       alignment /= BITS_PER_UNIT;
385     }
386   else if (align == -1)
387     {
388       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
389       size = aligned_upper_bound (size, alignment);
390     }
391   else if (align == -2)
392     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
393   else
394     alignment = align / BITS_PER_UNIT;
395 
396   alignment_in_bits = alignment * BITS_PER_UNIT;
397 
398   /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
399   if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
400     {
401       alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
402       alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
403     }
404 
405   if (SUPPORTS_STACK_ALIGNMENT)
406     {
407       if (crtl->stack_alignment_estimated < alignment_in_bits)
408 	{
409           if (!crtl->stack_realign_processed)
410 	    crtl->stack_alignment_estimated = alignment_in_bits;
411           else
412 	    {
413 	      /* If stack is realigned and stack alignment value
414 		 hasn't been finalized, it is OK not to increase
415 		 stack_alignment_estimated.  The bigger alignment
416 		 requirement is recorded in stack_alignment_needed
417 		 below.  */
418 	      gcc_assert (!crtl->stack_realign_finalized);
419 	      if (!crtl->stack_realign_needed)
420 		{
421 		  /* It is OK to reduce the alignment as long as the
422 		     requested size is 0 or the estimated stack
423 		     alignment >= mode alignment.  */
424 		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
425 			      || known_eq (size, 0)
426 			      || (crtl->stack_alignment_estimated
427 				  >= GET_MODE_ALIGNMENT (mode)));
428 		  alignment_in_bits = crtl->stack_alignment_estimated;
429 		  alignment = alignment_in_bits / BITS_PER_UNIT;
430 		}
431 	    }
432 	}
433     }
434 
435   if (crtl->stack_alignment_needed < alignment_in_bits)
436     crtl->stack_alignment_needed = alignment_in_bits;
437   if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
438     crtl->max_used_stack_slot_alignment = alignment_in_bits;
439 
440   if (mode != BLKmode || maybe_ne (size, 0))
441     {
442       if (kind & ASLK_RECORD_PAD)
443 	{
444 	  struct frame_space **psp;
445 
446 	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
447 	    {
448 	      struct frame_space *space = *psp;
449 	      if (!try_fit_stack_local (space->start, space->length, size,
450 					alignment, &slot_offset))
451 		continue;
452 	      *psp = space->next;
453 	      if (known_gt (slot_offset, space->start))
454 		add_frame_space (space->start, slot_offset);
455 	      if (known_lt (slot_offset + size, space->start + space->length))
456 		add_frame_space (slot_offset + size,
457 				 space->start + space->length);
458 	      goto found_space;
459 	    }
460 	}
461     }
462   else if (!STACK_ALIGNMENT_NEEDED)
463     {
464       slot_offset = frame_offset;
465       goto found_space;
466     }
467 
468   old_frame_offset = frame_offset;
469 
470   if (FRAME_GROWS_DOWNWARD)
471     {
472       frame_offset -= size;
473       try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
474 
475       if (kind & ASLK_RECORD_PAD)
476 	{
477 	  if (known_gt (slot_offset, frame_offset))
478 	    add_frame_space (frame_offset, slot_offset);
479 	  if (known_lt (slot_offset + size, old_frame_offset))
480 	    add_frame_space (slot_offset + size, old_frame_offset);
481 	}
482     }
483   else
484     {
485       frame_offset += size;
486       try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
487 
488       if (kind & ASLK_RECORD_PAD)
489 	{
490 	  if (known_gt (slot_offset, old_frame_offset))
491 	    add_frame_space (old_frame_offset, slot_offset);
492 	  if (known_lt (slot_offset + size, frame_offset))
493 	    add_frame_space (slot_offset + size, frame_offset);
494 	}
495     }
496 
497  found_space:
498   /* On a big-endian machine, if we are allocating more space than we will use,
499      use the least significant bytes of those that are allocated.  */
500   if (mode != BLKmode)
501     {
502       /* The slot size can sometimes be smaller than the mode size;
503 	 e.g. the rs6000 port allocates slots with a vector mode
504 	 that have the size of only one element.  However, the slot
505 	 size must always be ordered wrt the mode size, in the
506 	 same way as for a subreg.  */
507       gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
508       if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
509 	bigend_correction = size - GET_MODE_SIZE (mode);
510     }
511 
512   /* If we have already instantiated virtual registers, return the actual
513      address relative to the frame pointer.  */
514   if (virtuals_instantiated)
515     addr = plus_constant (Pmode, frame_pointer_rtx,
516 			  trunc_int_for_mode
517 			  (slot_offset + bigend_correction
518 			   + targetm.starting_frame_offset (), Pmode));
519   else
520     addr = plus_constant (Pmode, virtual_stack_vars_rtx,
521 			  trunc_int_for_mode
522 			  (slot_offset + bigend_correction,
523 			   Pmode));
524 
525   x = gen_rtx_MEM (mode, addr);
526   set_mem_align (x, alignment_in_bits);
527   MEM_NOTRAP_P (x) = 1;
528 
529   vec_safe_push (stack_slot_list, x);
530 
531   if (frame_offset_overflow (frame_offset, current_function_decl))
532     frame_offset = 0;
533 
534   return x;
535 }
536 
537 /* Wrap up assign_stack_local_1 with last parameter as false.  */
538 
539 rtx
540 assign_stack_local (machine_mode mode, poly_int64 size, int align)
541 {
542   return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
543 }
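
/* Illustrative use: a caller that wants a mode-aligned DImode slot
   would typically write

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   where ALIGN == 0 asks for the alignment implied by DImode, as
   described above assign_stack_local_1.  */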
544 
545 /* In order to evaluate some expressions, such as function calls returning
546    structures in memory, we need to temporarily allocate stack locations.
547    We record each allocated temporary in the following structure.
548 
549    Associated with each temporary slot is a nesting level.  When we pop up
550    one level, all temporaries associated with the previous level are freed.
551    Normally, all temporaries are freed after the execution of the statement
552    in which they were created.  However, if we are inside a ({...}) grouping,
553    the result may be in a temporary and hence must be preserved.  If the
554    result could be in a temporary, we preserve it if we can determine which
555    one it is in.  If we cannot determine which temporary may contain the
556    result, all temporaries are preserved.  A temporary is preserved by
557    pretending it was allocated at the previous nesting level.  */
558 
559 struct GTY(()) temp_slot {
560   /* Points to next temporary slot.  */
561   struct temp_slot *next;
562   /* Points to previous temporary slot.  */
563   struct temp_slot *prev;
564   /* The rtx used to reference the slot.  */
565   rtx slot;
566   /* The size, in units, of the slot.  */
567   poly_int64 size;
568   /* The type of the object in the slot, or zero if it doesn't correspond
569      to a type.  We use this to determine whether a slot can be reused.
570      It can be reused if objects of the type of the new slot will always
571      conflict with objects of the type of the old slot.  */
572   tree type;
573   /* The alignment (in bits) of the slot.  */
574   unsigned int align;
575   /* Nonzero if this temporary is currently in use.  */
576   char in_use;
577   /* Nesting level at which this slot is being used.  */
578   int level;
579   /* The offset of the slot from the frame_pointer, including extra space
580      for alignment.  This info is for combine_temp_slots.  */
581   poly_int64 base_offset;
582   /* The size of the slot, including extra space for alignment.  This
583      info is for combine_temp_slots.  */
584   poly_int64 full_size;
585 };
586 
587 /* Entry for the below hash table.  */
588 struct GTY((for_user)) temp_slot_address_entry {
589   hashval_t hash;
590   rtx address;
591   struct temp_slot *temp_slot;
592 };
593 
594 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
595 {
596   static hashval_t hash (temp_slot_address_entry *);
597   static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
598 };
599 
600 /* A table of addresses that represent a stack slot.  The table is a mapping
601    from address RTXen to a temp slot.  */
602 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
603 static size_t n_temp_slots_in_use;
604 
605 /* Removes temporary slot TEMP from LIST.  */
606 
607 static void
608 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
609 {
610   if (temp->next)
611     temp->next->prev = temp->prev;
612   if (temp->prev)
613     temp->prev->next = temp->next;
614   else
615     *list = temp->next;
616 
617   temp->prev = temp->next = NULL;
618 }
619 
620 /* Inserts temporary slot TEMP into LIST.  */
621 
622 static void
623 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
624 {
625   temp->next = *list;
626   if (*list)
627     (*list)->prev = temp;
628   temp->prev = NULL;
629   *list = temp;
630 }
631 
632 /* Returns the list of used temp slots at LEVEL.  */
633 
634 static struct temp_slot **
635 temp_slots_at_level (int level)
636 {
637   if (level >= (int) vec_safe_length (used_temp_slots))
638     vec_safe_grow_cleared (used_temp_slots, level + 1);
639 
640   return &(*used_temp_slots)[level];
641 }
642 
643 /* Returns the maximal temporary slot level.  */
644 
645 static int
646 max_slot_level (void)
647 {
648   if (!used_temp_slots)
649     return -1;
650 
651   return used_temp_slots->length () - 1;
652 }
653 
654 /* Moves temporary slot TEMP to LEVEL.  */
655 
656 static void
657 move_slot_to_level (struct temp_slot *temp, int level)
658 {
659   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
660   insert_slot_to_list (temp, temp_slots_at_level (level));
661   temp->level = level;
662 }
663 
664 /* Make temporary slot TEMP available.  */
665 
666 static void
667 make_slot_available (struct temp_slot *temp)
668 {
669   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
670   insert_slot_to_list (temp, &avail_temp_slots);
671   temp->in_use = 0;
672   temp->level = -1;
673   n_temp_slots_in_use--;
674 }
675 
676 /* Compute the hash value for an address -> temp slot mapping.
677    The value is cached on the mapping entry.  */
678 static hashval_t
679 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
680 {
681   int do_not_record = 0;
682   return hash_rtx (t->address, GET_MODE (t->address),
683 		   &do_not_record, NULL, false);
684 }
685 
686 /* Return the hash value for an address -> temp slot mapping.  */
687 hashval_t
688 temp_address_hasher::hash (temp_slot_address_entry *t)
689 {
690   return t->hash;
691 }
692 
693 /* Compare two address -> temp slot mapping entries.  */
694 bool
695 temp_address_hasher::equal (temp_slot_address_entry *t1,
696 			    temp_slot_address_entry *t2)
697 {
698   return exp_equiv_p (t1->address, t2->address, 0, true);
699 }
700 
701 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
702 static void
703 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
704 {
705   struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
706   t->address = address;
707   t->temp_slot = temp_slot;
708   t->hash = temp_slot_address_compute_hash (t);
709   *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
710 }
711 
712 /* Remove an address -> temp slot mapping entry if the temp slot is
713    not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
714 int
715 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
716 {
717   const struct temp_slot_address_entry *t = *slot;
718   if (! t->temp_slot->in_use)
719     temp_slot_address_table->clear_slot (slot);
720   return 1;
721 }
722 
723 /* Remove all mappings of addresses to unused temp slots.  */
724 static void
725 remove_unused_temp_slot_addresses (void)
726 {
727   /* Use quicker clearing if there aren't any active temp slots.  */
728   if (n_temp_slots_in_use)
729     temp_slot_address_table->traverse
730       <void *, remove_unused_temp_slot_addresses_1> (NULL);
731   else
732     temp_slot_address_table->empty ();
733 }
734 
735 /* Find the temp slot corresponding to the object at address X.  */
736 
737 static struct temp_slot *
738 find_temp_slot_from_address (rtx x)
739 {
740   struct temp_slot *p;
741   struct temp_slot_address_entry tmp, *t;
742 
743   /* First try the easy way:
744      See if X exists in the address -> temp slot mapping.  */
745   tmp.address = x;
746   tmp.temp_slot = NULL;
747   tmp.hash = temp_slot_address_compute_hash (&tmp);
748   t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
749   if (t)
750     return t->temp_slot;
751 
752   /* If we have a sum involving a register, see if it points to a temp
753      slot.  */
754   if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
755       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
756     return p;
757   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
758 	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
759     return p;
760 
761   /* Last resort: Address is a virtual stack var address.  */
762   poly_int64 offset;
763   if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
764     {
765       int i;
766       for (i = max_slot_level (); i >= 0; i--)
767 	for (p = *temp_slots_at_level (i); p; p = p->next)
768 	  if (known_in_range_p (offset, p->base_offset, p->full_size))
769 	    return p;
770     }
771 
772   return NULL;
773 }
774 
775 /* Allocate a temporary stack slot and record it for possible later
776    reuse.
777 
778    MODE is the machine mode to be given to the returned rtx.
779 
780    SIZE is the size in units of the space required.  We do no rounding here
781    since assign_stack_local will do any required rounding.
782 
783    TYPE is the type that will be used for the stack slot.  */
784 
785 rtx
786 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
787 {
788   unsigned int align;
789   struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
790   rtx slot;
791 
792   gcc_assert (known_size_p (size));
793 
794   align = get_stack_local_alignment (type, mode);
795 
796   /* Try to find an available, already-allocated temporary of the proper
797      mode which meets the size and alignment requirements.  Choose the
798      smallest one with the closest alignment.
799 
800      If assign_stack_temp is called outside of the tree->rtl expansion,
801      we cannot reuse the stack slots (that may still refer to
802      VIRTUAL_STACK_VARS_REGNUM).  */
803   if (!virtuals_instantiated)
804     {
805       for (p = avail_temp_slots; p; p = p->next)
806 	{
807 	  if (p->align >= align
808 	      && known_ge (p->size, size)
809 	      && GET_MODE (p->slot) == mode
810 	      && objects_must_conflict_p (p->type, type)
811 	      && (best_p == 0
812 		  || (known_eq (best_p->size, p->size)
813 		      ? best_p->align > p->align
814 		      : known_ge (best_p->size, p->size))))
815 	    {
816 	      if (p->align == align && known_eq (p->size, size))
817 		{
818 		  selected = p;
819 		  cut_slot_from_list (selected, &avail_temp_slots);
820 		  best_p = 0;
821 		  break;
822 		}
823 	      best_p = p;
824 	    }
825 	}
826     }
827 
828   /* Make our best, if any, the one to use.  */
829   if (best_p)
830     {
831       selected = best_p;
832       cut_slot_from_list (selected, &avail_temp_slots);
833 
834       /* If there are enough aligned bytes left over, make them into a new
835 	 temp_slot so that the extra bytes don't get wasted.  Do this only
836 	 for BLKmode slots, so that we can be sure of the alignment.  */
837       if (GET_MODE (best_p->slot) == BLKmode)
838 	{
839 	  int alignment = best_p->align / BITS_PER_UNIT;
840 	  poly_int64 rounded_size = aligned_upper_bound (size, alignment);
841 
842 	  if (known_ge (best_p->size - rounded_size, alignment))
843 	    {
844 	      p = ggc_alloc<temp_slot> ();
845 	      p->in_use = 0;
846 	      p->size = best_p->size - rounded_size;
847 	      p->base_offset = best_p->base_offset + rounded_size;
848 	      p->full_size = best_p->full_size - rounded_size;
849 	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
850 	      p->align = best_p->align;
851 	      p->type = best_p->type;
852 	      insert_slot_to_list (p, &avail_temp_slots);
853 
854 	      vec_safe_push (stack_slot_list, p->slot);
855 
856 	      best_p->size = rounded_size;
857 	      best_p->full_size = rounded_size;
858 	    }
859 	}
860     }
861 
862   /* If we still didn't find one, make a new temporary.  */
863   if (selected == 0)
864     {
865       poly_int64 frame_offset_old = frame_offset;
866 
867       p = ggc_alloc<temp_slot> ();
868 
869       /* We are passing an explicit alignment request to assign_stack_local.
870 	 One side effect of that is assign_stack_local will not round SIZE
871 	 to ensure the frame offset remains suitably aligned.
872 
873 	 So for requests which depended on the rounding of SIZE, we go ahead
874 	 and round it now.  We also make sure ALIGNMENT is at least
875 	 BIGGEST_ALIGNMENT.  */
876       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
877       p->slot = assign_stack_local_1 (mode,
878 				      (mode == BLKmode
879 				       ? aligned_upper_bound (size,
880 							      (int) align
881 							      / BITS_PER_UNIT)
882 				       : size),
883 				      align, 0);
884 
885       p->align = align;
886 
887       /* The following slot size computation is necessary because we don't
888 	 know the actual size of the temporary slot until assign_stack_local
889 	 has performed all the frame alignment and size rounding for the
890 	 requested temporary.  Note that extra space added for alignment
891 	 can be either above or below this stack slot depending on which
892 	 way the frame grows.  We include the extra space if and only if it
893 	 is above this slot.  */
894       if (FRAME_GROWS_DOWNWARD)
895 	p->size = frame_offset_old - frame_offset;
896       else
897 	p->size = size;
898 
899       /* Now define the fields used by combine_temp_slots.  */
900       if (FRAME_GROWS_DOWNWARD)
901 	{
902 	  p->base_offset = frame_offset;
903 	  p->full_size = frame_offset_old - frame_offset;
904 	}
905       else
906 	{
907 	  p->base_offset = frame_offset_old;
908 	  p->full_size = frame_offset - frame_offset_old;
909 	}
910 
911       selected = p;
912     }
913 
914   p = selected;
915   p->in_use = 1;
916   p->type = type;
917   p->level = temp_slot_level;
918   n_temp_slots_in_use++;
919 
920   pp = temp_slots_at_level (p->level);
921   insert_slot_to_list (p, pp);
922   insert_temp_slot_address (XEXP (p->slot, 0), p);
923 
924   /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
925   slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
926   vec_safe_push (stack_slot_list, slot);
927 
928   /* If we know the alias set for the memory that will be used, use
929      it.  If there's no TYPE, then we don't know anything about the
930      alias set for the memory.  */
931   set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
932   set_mem_align (slot, align);
933 
934   /* If a type is specified, set the relevant flags.  */
935   if (type != 0)
936     MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
937   MEM_NOTRAP_P (slot) = 1;
938 
939   return slot;
940 }
941 
942 /* Allocate a temporary stack slot and record it for possible later
943    reuse.  The first two arguments are the same as in the preceding function.  */
944 
945 rtx
946 assign_stack_temp (machine_mode mode, poly_int64 size)
947 {
948   return assign_stack_temp_for_type (mode, size, NULL_TREE);
949 }
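
/* Illustrative use: a scratch word for an intermediate value that must
   live in memory could be obtained with

     rtx tmp = assign_stack_temp (word_mode, UNITS_PER_WORD);

   and is recycled once free_temp_slots is called for the enclosing
   statement.  */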
950 
951 /* Assign a temporary.
952    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
953    and the decl should be used in error messages.  In either case, we
954    allocate storage of the given type.
955    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
956    it is 0 if a register is OK.
957    DONT_PROMOTE is 1 if we should not promote values in register
958    to wider modes.  */
959 
960 rtx
961 assign_temp (tree type_or_decl, int memory_required,
962 	     int dont_promote ATTRIBUTE_UNUSED)
963 {
964   tree type, decl;
965   machine_mode mode;
966 #ifdef PROMOTE_MODE
967   int unsignedp;
968 #endif
969 
970   if (DECL_P (type_or_decl))
971     decl = type_or_decl, type = TREE_TYPE (decl);
972   else
973     decl = NULL, type = type_or_decl;
974 
975   mode = TYPE_MODE (type);
976 #ifdef PROMOTE_MODE
977   unsignedp = TYPE_UNSIGNED (type);
978 #endif
979 
980   /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
981      end.  See also create_tmp_var for the gimplification-time check.  */
982   gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
983 
984   if (mode == BLKmode || memory_required)
985     {
986       poly_int64 size;
987       rtx tmp;
988 
989       /* Unfortunately, we don't yet know how to allocate variable-sized
990 	 temporaries.  However, sometimes we can find a fixed upper limit on
991 	 the size, so try that instead.  */
992       if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
993 	size = max_int_size_in_bytes (type);
994 
995       /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
996 	 problems with allocating the stack space.  */
997       if (known_eq (size, 0))
998 	size = 1;
999 
1000       /* The size of the temporary may be too large to fit into an integer.  */
1001       /* ??? Not sure this should happen except for user silliness, so limit
1002 	 this to things that aren't compiler-generated temporaries.  The
1003 	 rest of the time we'll die in assign_stack_temp_for_type.  */
1004       if (decl
1005 	  && !known_size_p (size)
1006 	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1007 	{
1008 	  error ("size of variable %q+D is too large", decl);
1009 	  size = 1;
1010 	}
1011 
1012       tmp = assign_stack_temp_for_type (mode, size, type);
1013       return tmp;
1014     }
1015 
1016 #ifdef PROMOTE_MODE
1017   if (! dont_promote)
1018     mode = promote_mode (type, mode, &unsignedp);
1019 #endif
1020 
1021   return gen_reg_rtx (mode);
1022 }
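
/* Illustrative use:

     rtx t = assign_temp (integer_type_node, 0, 0);

   returns a pseudo register for an int-typed temporary (possibly in a
   wider mode on targets that define PROMOTE_MODE), while passing
   MEMORY_REQUIRED == 1 would force an addressable stack slot instead.  */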
1023 
1024 /* Combine temporary stack slots which are adjacent on the stack.
1025 
1026    This allows for better use of already allocated stack space.  This is only
1027    done for BLKmode slots because we can be sure that we won't have alignment
1028    problems in this case.  */
1029 
1030 static void
1031 combine_temp_slots (void)
1032 {
1033   struct temp_slot *p, *q, *next, *next_q;
1034   int num_slots;
1035 
1036   /* We can't combine slots, because the information about which slot
1037      is in which alias set will be lost.  */
1038   if (flag_strict_aliasing)
1039     return;
1040 
1041   /* If there are a lot of temp slots, don't do anything unless
1042      we are optimizing at high levels.  */
1043   if (! flag_expensive_optimizations)
1044     for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1045       if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1046 	return;
1047 
1048   for (p = avail_temp_slots; p; p = next)
1049     {
1050       int delete_p = 0;
1051 
1052       next = p->next;
1053 
1054       if (GET_MODE (p->slot) != BLKmode)
1055 	continue;
1056 
1057       for (q = p->next; q; q = next_q)
1058 	{
1059        	  int delete_q = 0;
1060 
1061 	  next_q = q->next;
1062 
1063 	  if (GET_MODE (q->slot) != BLKmode)
1064 	    continue;
1065 
1066 	  if (known_eq (p->base_offset + p->full_size, q->base_offset))
1067 	    {
1068 	      /* Q comes after P; combine Q into P.  */
1069 	      p->size += q->size;
1070 	      p->full_size += q->full_size;
1071 	      delete_q = 1;
1072 	    }
1073 	  else if (known_eq (q->base_offset + q->full_size, p->base_offset))
1074 	    {
1075 	      /* P comes after Q; combine P into Q.  */
1076 	      q->size += p->size;
1077 	      q->full_size += p->full_size;
1078 	      delete_p = 1;
1079 	      break;
1080 	    }
1081 	  if (delete_q)
1082 	    cut_slot_from_list (q, &avail_temp_slots);
1083 	}
1084 
1085       /* Either delete P or advance past it.  */
1086       if (delete_p)
1087 	cut_slot_from_list (p, &avail_temp_slots);
1088     }
1089 }
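
/* For example (illustrative): two freed BLKmode slots, one at
   base_offset 0 with full_size 16 and one at base_offset 16 with
   full_size 8, are merged above into a single 24-byte slot at offset 0,
   which assign_stack_temp_for_type can later hand out again as a
   whole.  */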
1090 
1091 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1092    slot that previously was known by OLD_RTX.  */
1093 
1094 void
1095 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1096 {
1097   struct temp_slot *p;
1098 
1099   if (rtx_equal_p (old_rtx, new_rtx))
1100     return;
1101 
1102   p = find_temp_slot_from_address (old_rtx);
1103 
1104   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1105      NEW_RTX is a register, see if one operand of the PLUS is a
1106      temporary location.  If so, NEW_RTX points into it.  Otherwise,
1107      if both OLD_RTX and NEW_RTX are a PLUS and there is a register
1108      in common between them, try a recursive call on those
1109      values.  */
1110   if (p == 0)
1111     {
1112       if (GET_CODE (old_rtx) != PLUS)
1113 	return;
1114 
1115       if (REG_P (new_rtx))
1116 	{
1117 	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1118 	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1119 	  return;
1120 	}
1121       else if (GET_CODE (new_rtx) != PLUS)
1122 	return;
1123 
1124       if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1125 	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1126       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1127 	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1128       else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1129 	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1130       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1131 	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1132 
1133       return;
1134     }
1135 
1136   /* Otherwise add an alias for the temp's address.  */
1137   insert_temp_slot_address (new_rtx, p);
1138 }
1139 
1140 /* If X could be a reference to a temporary slot, mark that slot as
1141    belonging to the level one higher than the current level.  If X
1142    matched one of our slots, just mark that one.  Otherwise, we can't
1143    easily predict which it is, so upgrade all of them.
1144 
1145    This is called when an ({...}) construct occurs and a statement
1146    returns a value in memory.  */
1147 
1148 void
1149 preserve_temp_slots (rtx x)
1150 {
1151   struct temp_slot *p = 0, *next;
1152 
1153   if (x == 0)
1154     return;
1155 
1156   /* If X is a register that is being used as a pointer, see if we have
1157      a temporary slot we know it points to.  */
1158   if (REG_P (x) && REG_POINTER (x))
1159     p = find_temp_slot_from_address (x);
1160 
1161   /* If X is not in memory or is at a constant address, it cannot be in
1162      a temporary slot.  */
1163   if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1164     return;
1165 
1166   /* First see if we can find a match.  */
1167   if (p == 0)
1168     p = find_temp_slot_from_address (XEXP (x, 0));
1169 
1170   if (p != 0)
1171     {
1172       if (p->level == temp_slot_level)
1173 	move_slot_to_level (p, temp_slot_level - 1);
1174       return;
1175     }
1176 
1177   /* Otherwise, preserve all non-kept slots at this level.  */
1178   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1179     {
1180       next = p->next;
1181       move_slot_to_level (p, temp_slot_level - 1);
1182     }
1183 }
1184 
1185 /* Free all temporaries used so far.  This is normally called at the
1186    end of generating code for a statement.  */
1187 
1188 void
1189 free_temp_slots (void)
1190 {
1191   struct temp_slot *p, *next;
1192   bool some_available = false;
1193 
1194   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1195     {
1196       next = p->next;
1197       make_slot_available (p);
1198       some_available = true;
1199     }
1200 
1201   if (some_available)
1202     {
1203       remove_unused_temp_slot_addresses ();
1204       combine_temp_slots ();
1205     }
1206 }
1207 
1208 /* Push deeper into the nesting level for stack temporaries.  */
1209 
1210 void
1211 push_temp_slots (void)
1212 {
1213   temp_slot_level++;
1214 }
1215 
1216 /* Pop a temporary nesting level.  All slots in use in the current level
1217    are freed.  */
1218 
1219 void
1220 pop_temp_slots (void)
1221 {
1222   free_temp_slots ();
1223   temp_slot_level--;
1224 }
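
/* A typical (illustrative) expansion sequence for one statement is

     push_temp_slots ();
     ... expand the statement, perhaps calling assign_stack_temp ...
     preserve_temp_slots (result);
     pop_temp_slots ();

   where the preserve_temp_slots call is only needed if the statement's
   value must outlive the statement, as described above.  */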
1225 
1226 /* Initialize temporary slots.  */
1227 
1228 void
1229 init_temp_slots (void)
1230 {
1231   /* We have not allocated any temporaries yet.  */
1232   avail_temp_slots = 0;
1233   vec_alloc (used_temp_slots, 0);
1234   temp_slot_level = 0;
1235   n_temp_slots_in_use = 0;
1236 
1237   /* Set up the table to map addresses to temp slots.  */
1238   if (! temp_slot_address_table)
1239     temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1240   else
1241     temp_slot_address_table->empty ();
1242 }
1243 
1244 /* Functions and data structures to keep track of the values hard regs
1245    had at the start of the function.  */
1246 
1247 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1248    and has_hard_reg_initial_val.  */
1249 struct GTY(()) initial_value_pair {
1250   rtx hard_reg;
1251   rtx pseudo;
1252 };
1253 /* ???  This could be a VEC but there is currently no way to define an
1254    opaque VEC type.  This could be worked around by defining struct
1255    initial_value_pair in function.h.  */
1256 struct GTY(()) initial_value_struct {
1257   int num_entries;
1258   int max_entries;
1259   initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1260 };
1261 
1262 /* If a pseudo represents an initial hard reg (or expression), return
1263    it, else return NULL_RTX.  */
1264 
1265 rtx
1266 get_hard_reg_initial_reg (rtx reg)
1267 {
1268   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1269   int i;
1270 
1271   if (ivs == 0)
1272     return NULL_RTX;
1273 
1274   for (i = 0; i < ivs->num_entries; i++)
1275     if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1276       return ivs->entries[i].hard_reg;
1277 
1278   return NULL_RTX;
1279 }
1280 
1281 /* Make sure that there's a pseudo register of mode MODE that stores the
1282    initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
1283 
1284 rtx
1285 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1286 {
1287   struct initial_value_struct *ivs;
1288   rtx rv;
1289 
1290   rv = has_hard_reg_initial_val (mode, regno);
1291   if (rv)
1292     return rv;
1293 
1294   ivs = crtl->hard_reg_initial_vals;
1295   if (ivs == 0)
1296     {
1297       ivs = ggc_alloc<initial_value_struct> ();
1298       ivs->num_entries = 0;
1299       ivs->max_entries = 5;
1300       ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1301       crtl->hard_reg_initial_vals = ivs;
1302     }
1303 
1304   if (ivs->num_entries >= ivs->max_entries)
1305     {
1306       ivs->max_entries += 5;
1307       ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1308 				    ivs->max_entries);
1309     }
1310 
1311   ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1312   ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1313 
1314   return ivs->entries[ivs->num_entries++].pseudo;
1315 }
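
/* Illustrative use: a port that needs the value the return-address
   register had on function entry could write something like

     rtx ra = get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);

   where RETURN_ADDR_REGNUM stands for that port's hard register number;
   the actual copy from the hard register is emitted later by
   emit_initial_value_sets below.  */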
1316 
1317 /* See if get_hard_reg_initial_val has been used to create a pseudo
1318    for the initial value of hard register REGNO in mode MODE.  Return
1319    the associated pseudo if so, otherwise return NULL.  */
1320 
1321 rtx
1322 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1323 {
1324   struct initial_value_struct *ivs;
1325   int i;
1326 
1327   ivs = crtl->hard_reg_initial_vals;
1328   if (ivs != 0)
1329     for (i = 0; i < ivs->num_entries; i++)
1330       if (GET_MODE (ivs->entries[i].hard_reg) == mode
1331 	  && REGNO (ivs->entries[i].hard_reg) == regno)
1332 	return ivs->entries[i].pseudo;
1333 
1334   return NULL_RTX;
1335 }
1336 
1337 unsigned int
1338 emit_initial_value_sets (void)
1339 {
1340   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1341   int i;
1342   rtx_insn *seq;
1343 
1344   if (ivs == 0)
1345     return 0;
1346 
1347   start_sequence ();
1348   for (i = 0; i < ivs->num_entries; i++)
1349     emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1350   seq = get_insns ();
1351   end_sequence ();
1352 
1353   emit_insn_at_entry (seq);
1354   return 0;
1355 }
1356 
1357 /* Store the hard register and pseudo register of initial-values entry I
1358    in *HREG and *PREG.  Return TRUE if I is a valid entry, FALSE otherwise.  */
1359 bool
1360 initial_value_entry (int i, rtx *hreg, rtx *preg)
1361 {
1362   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1363   if (!ivs || i >= ivs->num_entries)
1364     return false;
1365 
1366   *hreg = ivs->entries[i].hard_reg;
1367   *preg = ivs->entries[i].pseudo;
1368   return true;
1369 }
1370 
1371 /* These routines are responsible for converting virtual register references
1372    to the actual hard register references once RTL generation is complete.
1373 
1374    The following four variables are used for communication between the
1375    routines.  They contain the offsets of the virtual registers from their
1376    respective hard registers.  */
1377 
1378 static poly_int64 in_arg_offset;
1379 static poly_int64 var_offset;
1380 static poly_int64 dynamic_offset;
1381 static poly_int64 out_arg_offset;
1382 static poly_int64 cfa_offset;
1383 
1384 /* On most machines, the stack pointer register is equivalent to the bottom
1385    of the stack.  */
1386 
1387 #ifndef STACK_POINTER_OFFSET
1388 #define STACK_POINTER_OFFSET	0
1389 #endif
1390 
1391 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1392 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1393 #endif
1394 
1395 /* If not defined, pick an appropriate default for the offset of dynamically
1396    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1397    INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
1398 
1399 #ifndef STACK_DYNAMIC_OFFSET
1400 
1401 /* The bottom of the stack points to the actual arguments.  If
1402    REG_PARM_STACK_SPACE is defined, this includes the space for the register
1403    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1404    stack space for register parameters is not pushed by the caller, but is
1405    rather part of the fixed stack areas and hence not included in
1406    `crtl->outgoing_args_size'.  Nevertheless, we must allow
1407    for it when allocating stack dynamic objects.  */
1408 
1409 #ifdef INCOMING_REG_PARM_STACK_SPACE
1410 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1411 ((ACCUMULATE_OUTGOING_ARGS						      \
1412   ? (crtl->outgoing_args_size				      \
1413      + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1414 					       : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1415   : 0) + (STACK_POINTER_OFFSET))
1416 #else
1417 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1418   ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
1419  + (STACK_POINTER_OFFSET))
1420 #endif
1421 #endif
1422 
1423 
1424 /* Given a piece of RTX and a pointer to a poly_int64, if the RTX
1425    is a virtual register, return the equivalent hard register and set the
1426    offset indirectly through the pointer.  Otherwise, return 0.  */
1427 
1428 static rtx
1429 instantiate_new_reg (rtx x, poly_int64_pod *poffset)
1430 {
1431   rtx new_rtx;
1432   poly_int64 offset;
1433 
1434   if (x == virtual_incoming_args_rtx)
1435     {
1436       if (stack_realign_drap)
1437         {
1438 	  /* Replace virtual_incoming_args_rtx with internal arg
1439 	     pointer if DRAP is used to realign stack.  */
1440           new_rtx = crtl->args.internal_arg_pointer;
1441           offset = 0;
1442         }
1443       else
1444         new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1445     }
1446   else if (x == virtual_stack_vars_rtx)
1447     new_rtx = frame_pointer_rtx, offset = var_offset;
1448   else if (x == virtual_stack_dynamic_rtx)
1449     new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1450   else if (x == virtual_outgoing_args_rtx)
1451     new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1452   else if (x == virtual_cfa_rtx)
1453     {
1454 #ifdef FRAME_POINTER_CFA_OFFSET
1455       new_rtx = frame_pointer_rtx;
1456 #else
1457       new_rtx = arg_pointer_rtx;
1458 #endif
1459       offset = cfa_offset;
1460     }
1461   else if (x == virtual_preferred_stack_boundary_rtx)
1462     {
1463       new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1464       offset = 0;
1465     }
1466   else
1467     return NULL_RTX;
1468 
1469   *poffset = offset;
1470   return new_rtx;
1471 }
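
/* For example (illustrative): once the offsets above are known, an
   address such as (plus (reg virtual_stack_vars_rtx) (const_int 8)) is
   rewritten by the code below into
   (plus (reg frame_pointer_rtx) (const_int var_offset + 8)); the result
   is simplified but not necessarily a valid address, so callers may
   still have to reload it into a register.  */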
1472 
1473 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1474    registers present inside of *LOC.  The expression is simplified,
1475    as much as possible, but is not to be considered "valid" in any sense
1476    implied by the target.  Return true if any change is made.  */
1477 
1478 static bool
1479 instantiate_virtual_regs_in_rtx (rtx *loc)
1480 {
1481   if (!*loc)
1482     return false;
1483   bool changed = false;
1484   subrtx_ptr_iterator::array_type array;
1485   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1486     {
1487       rtx *loc = *iter;
1488       if (rtx x = *loc)
1489 	{
1490 	  rtx new_rtx;
1491 	  poly_int64 offset;
1492 	  switch (GET_CODE (x))
1493 	    {
1494 	    case REG:
1495 	      new_rtx = instantiate_new_reg (x, &offset);
1496 	      if (new_rtx)
1497 		{
1498 		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1499 		  changed = true;
1500 		}
1501 	      iter.skip_subrtxes ();
1502 	      break;
1503 
1504 	    case PLUS:
1505 	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1506 	      if (new_rtx)
1507 		{
1508 		  XEXP (x, 0) = new_rtx;
1509 		  *loc = plus_constant (GET_MODE (x), x, offset, true);
1510 		  changed = true;
1511 		  iter.skip_subrtxes ();
1512 		  break;
1513 		}
1514 
1515 	      /* FIXME -- from old code */
1516 	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1517 		 we can commute the PLUS and SUBREG because pointers into the
1518 		 frame are well-behaved.  */
1519 	      break;
1520 
1521 	    default:
1522 	      break;
1523 	    }
1524 	}
1525     }
1526   return changed;
1527 }
1528 
1529 /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
1530    matches the predicate for insn CODE operand OPERAND.  */
1531 
1532 static int
1533 safe_insn_predicate (int code, int operand, rtx x)
1534 {
1535   return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1536 }
1537 
1538 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1539    registers present inside of insn.  The result will be a valid insn.  */
1540 
1541 static void
1542 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1543 {
1544   poly_int64 offset;
1545   int insn_code, i;
1546   bool any_change = false;
1547   rtx set, new_rtx, x;
1548   rtx_insn *seq;
1549 
1550   /* There are some special cases to be handled first.  */
1551   set = single_set (insn);
1552   if (set)
1553     {
1554       /* We're allowed to assign to a virtual register.  This is interpreted
1555 	 to mean that the underlying register gets assigned the inverse
1556 	 transformation.  This is used, for example, in the handling of
1557 	 non-local gotos.  */
1558       new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1559       if (new_rtx)
1560 	{
1561 	  start_sequence ();
1562 
1563 	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1564 	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1565 				   gen_int_mode (-offset, GET_MODE (new_rtx)));
1566 	  x = force_operand (x, new_rtx);
1567 	  if (x != new_rtx)
1568 	    emit_move_insn (new_rtx, x);
1569 
1570 	  seq = get_insns ();
1571 	  end_sequence ();
1572 
1573 	  emit_insn_before (seq, insn);
1574 	  delete_insn (insn);
1575 	  return;
1576 	}
1577 
1578       /* Handle a straight copy from a virtual register by generating a
1579 	 new add insn.  The difference between this and falling through
1580 	 to the generic case is avoiding a new pseudo and eliminating a
1581 	 move insn in the initial rtl stream.  */
1582       new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1583       if (new_rtx
1584 	  && maybe_ne (offset, 0)
1585 	  && REG_P (SET_DEST (set))
1586 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1587 	{
1588 	  start_sequence ();
1589 
1590 	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1591 				   gen_int_mode (offset,
1592 						 GET_MODE (SET_DEST (set))),
1593 				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1594 	  if (x != SET_DEST (set))
1595 	    emit_move_insn (SET_DEST (set), x);
1596 
1597 	  seq = get_insns ();
1598 	  end_sequence ();
1599 
1600 	  emit_insn_before (seq, insn);
1601 	  delete_insn (insn);
1602 	  return;
1603 	}
1604 
1605       extract_insn (insn);
1606       insn_code = INSN_CODE (insn);
1607 
1608       /* Handle a plus involving a virtual register by determining if the
1609 	 operands remain valid if they're modified in place.  */
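      /* For example (illustrative only; offsets are target-dependent):
	 with var_offset == 16,
	   (set (reg 102) (plus (reg virtual-stack-vars) (const_int 4)))
	 can often be fixed up in place as
	   (set (reg 102) (plus (reg frame-pointer) (const_int 20)))
	 provided both modified operands still satisfy the insn's
	 predicates.  */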
1610       poly_int64 delta;
1611       if (GET_CODE (SET_SRC (set)) == PLUS
1612 	  && recog_data.n_operands >= 3
1613 	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1614 	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1615 	  && poly_int_rtx_p (recog_data.operand[2], &delta)
1616 	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1617 	{
1618 	  offset += delta;
1619 
1620 	  /* If the sum is zero, then replace with a plain move.  */
1621 	  if (known_eq (offset, 0)
1622 	      && REG_P (SET_DEST (set))
1623 	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1624 	    {
1625 	      start_sequence ();
1626 	      emit_move_insn (SET_DEST (set), new_rtx);
1627 	      seq = get_insns ();
1628 	      end_sequence ();
1629 
1630 	      emit_insn_before (seq, insn);
1631 	      delete_insn (insn);
1632 	      return;
1633 	    }
1634 
1635 	  x = gen_int_mode (offset, recog_data.operand_mode[2]);
1636 
1637 	  /* Using validate_change and apply_change_group here leaves
1638 	     recog_data in an invalid state.  Since we know exactly what
1639 	     we want to check, do those two by hand.  */
1640 	  if (safe_insn_predicate (insn_code, 1, new_rtx)
1641 	      && safe_insn_predicate (insn_code, 2, x))
1642 	    {
1643 	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1644 	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1645 	      any_change = true;
1646 
1647 	      /* Fall through into the regular operand fixup loop in
1648 		 order to take care of operands other than 1 and 2.  */
1649 	    }
1650 	}
1651     }
1652   else
1653     {
1654       extract_insn (insn);
1655       insn_code = INSN_CODE (insn);
1656     }
1657 
1658   /* In the general case, we expect virtual registers to appear only in
1659      operands, and then only as either bare registers or inside memories.  */
1660   for (i = 0; i < recog_data.n_operands; ++i)
1661     {
1662       x = recog_data.operand[i];
1663       switch (GET_CODE (x))
1664 	{
1665 	case MEM:
1666 	  {
1667 	    rtx addr = XEXP (x, 0);
1668 
1669 	    if (!instantiate_virtual_regs_in_rtx (&addr))
1670 	      continue;
1671 
1672 	    start_sequence ();
1673 	    x = replace_equiv_address (x, addr, true);
1674 	    /* It may happen that the address with the virtual reg
1675 	       was valid (e.g. based on the virtual stack reg, which might
1676 	       be acceptable to the predicates with all offsets), whereas
1677 	       the address now isn't valid anymore, for instance when the
1678 	       address is still offset but the base reg isn't virtual-stack-reg
1679 	       anymore.  Below we would do a force_reg on the whole operand,
1680 	       but this insn might actually only accept memory.  Hence,
1681 	       before doing that last resort, try to reload the address into
1682 	       a register, so this operand stays a MEM.  */
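	    /* Hypothetical example (not from the original sources): an
	       operand such as
		 (mem (plus (reg virtual-stack-vars) (const_int 4)))
	       may instantiate to an address the predicate rejects; if the
	       insn only accepts memory, reloading just the address into a
	       register keeps the operand a MEM rather than forcing the
	       whole value into a register.  */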
1683 	    if (!safe_insn_predicate (insn_code, i, x))
1684 	      {
1685 		addr = force_reg (GET_MODE (addr), addr);
1686 		x = replace_equiv_address (x, addr, true);
1687 	      }
1688 	    seq = get_insns ();
1689 	    end_sequence ();
1690 	    if (seq)
1691 	      emit_insn_before (seq, insn);
1692 	  }
1693 	  break;
1694 
1695 	case REG:
1696 	  new_rtx = instantiate_new_reg (x, &offset);
1697 	  if (new_rtx == NULL)
1698 	    continue;
1699 	  if (known_eq (offset, 0))
1700 	    x = new_rtx;
1701 	  else
1702 	    {
1703 	      start_sequence ();
1704 
1705 	      /* Careful, special mode predicates may have stuff in
1706 		 insn_data[insn_code].operand[i].mode that isn't useful
1707 		 to us for computing a new value.  */
1708 	      /* ??? Recognize address_operand and/or "p" constraints
1709 		 to see if (plus new offset) is a valid address before we put
1710 		 this through expand_simple_binop.  */
1711 	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1712 				       gen_int_mode (offset, GET_MODE (x)),
1713 				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
1714 	      seq = get_insns ();
1715 	      end_sequence ();
1716 	      emit_insn_before (seq, insn);
1717 	    }
1718 	  break;
1719 
1720 	case SUBREG:
1721 	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1722 	  if (new_rtx == NULL)
1723 	    continue;
1724 	  if (maybe_ne (offset, 0))
1725 	    {
1726 	      start_sequence ();
1727 	      new_rtx = expand_simple_binop
1728 		(GET_MODE (new_rtx), PLUS, new_rtx,
1729 		 gen_int_mode (offset, GET_MODE (new_rtx)),
1730 		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1731 	      seq = get_insns ();
1732 	      end_sequence ();
1733 	      emit_insn_before (seq, insn);
1734 	    }
1735 	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1736 				   GET_MODE (new_rtx), SUBREG_BYTE (x));
1737 	  gcc_assert (x);
1738 	  break;
1739 
1740 	default:
1741 	  continue;
1742 	}
1743 
1744       /* At this point, X contains the new value for the operand.
1745 	 Validate the new value vs the insn predicate.  Note that
1746 	 asm insns will have insn_code -1 here.  */
1747       if (!safe_insn_predicate (insn_code, i, x))
1748 	{
1749 	  start_sequence ();
1750 	  if (REG_P (x))
1751 	    {
1752 	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1753 	      x = copy_to_reg (x);
1754 	    }
1755 	  else
1756 	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
1757 	  seq = get_insns ();
1758 	  end_sequence ();
1759 	  if (seq)
1760 	    emit_insn_before (seq, insn);
1761 	}
1762 
1763       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1764       any_change = true;
1765     }
1766 
1767   if (any_change)
1768     {
1769       /* Propagate operand changes into the duplicates.  */
1770       for (i = 0; i < recog_data.n_dups; ++i)
1771 	*recog_data.dup_loc[i]
1772 	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1773 
1774       /* Force re-recognition of the instruction for validation.  */
1775       INSN_CODE (insn) = -1;
1776     }
1777 
1778   if (asm_noperands (PATTERN (insn)) >= 0)
1779     {
1780       if (!check_asm_operands (PATTERN (insn)))
1781 	{
1782 	  error_for_asm (insn, "impossible constraint in %<asm%>");
1783 	  /* For asm goto, instead of fixing up all the edges
1784 	     just clear the template and clear input operands
1785 	     (asm goto doesn't have any output operands).  */
1786 	  if (JUMP_P (insn))
1787 	    {
1788 	      rtx asm_op = extract_asm_operands (PATTERN (insn));
1789 	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1790 	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1791 	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1792 	    }
1793 	  else
1794 	    delete_insn (insn);
1795 	}
1796     }
1797   else
1798     {
1799       if (recog_memoized (insn) < 0)
1800 	fatal_insn_not_found (insn);
1801     }
1802 }
1803 
1804 /* Subroutine of instantiate_decls.  Given RTL representing a decl,
1805    do any instantiation required.  */
1806 
1807 void
1808 instantiate_decl_rtl (rtx x)
1809 {
1810   rtx addr;
1811 
1812   if (x == 0)
1813     return;
1814 
1815   /* If this is a CONCAT, recurse for the pieces.  */
1816   if (GET_CODE (x) == CONCAT)
1817     {
1818       instantiate_decl_rtl (XEXP (x, 0));
1819       instantiate_decl_rtl (XEXP (x, 1));
1820       return;
1821     }
1822 
1823   /* If this is not a MEM, no need to do anything.  Similarly if the
1824      address is a constant or a register that is not a virtual register.  */
1825   if (!MEM_P (x))
1826     return;
1827 
1828   addr = XEXP (x, 0);
1829   if (CONSTANT_P (addr)
1830       || (REG_P (addr)
1831 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1832 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1833     return;
1834 
1835   instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1836 }
1837 
1838 /* Helper for instantiate_decls called via walk_tree: Process all decls
1839    in the given DECL_VALUE_EXPR.  */
1840 
1841 static tree
1842 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1843 {
1844   tree t = *tp;
1845   if (! EXPR_P (t))
1846     {
1847       *walk_subtrees = 0;
1848       if (DECL_P (t))
1849 	{
1850 	  if (DECL_RTL_SET_P (t))
1851 	    instantiate_decl_rtl (DECL_RTL (t));
1852 	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1853 	      && DECL_INCOMING_RTL (t))
1854 	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1855 	  if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1856 	      && DECL_HAS_VALUE_EXPR_P (t))
1857 	    {
1858 	      tree v = DECL_VALUE_EXPR (t);
1859 	      walk_tree (&v, instantiate_expr, NULL, NULL);
1860 	    }
1861 	}
1862     }
1863   return NULL;
1864 }
1865 
1866 /* Subroutine of instantiate_decls: Process all decls in the given
1867    BLOCK node and all its subblocks.  */
1868 
1869 static void
1870 instantiate_decls_1 (tree let)
1871 {
1872   tree t;
1873 
1874   for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1875     {
1876       if (DECL_RTL_SET_P (t))
1877 	instantiate_decl_rtl (DECL_RTL (t));
1878       if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1879 	{
1880 	  tree v = DECL_VALUE_EXPR (t);
1881 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1882 	}
1883     }
1884 
1885   /* Process all subblocks.  */
1886   for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1887     instantiate_decls_1 (t);
1888 }
1889 
1890 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1891    all virtual registers in their DECL_RTL's.  */
1892 
1893 static void
1894 instantiate_decls (tree fndecl)
1895 {
1896   tree decl;
1897   unsigned ix;
1898 
1899   /* Process all parameters of the function.  */
1900   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1901     {
1902       instantiate_decl_rtl (DECL_RTL (decl));
1903       instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1904       if (DECL_HAS_VALUE_EXPR_P (decl))
1905 	{
1906 	  tree v = DECL_VALUE_EXPR (decl);
1907 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1908 	}
1909     }
1910 
1911   if ((decl = DECL_RESULT (fndecl))
1912       && TREE_CODE (decl) == RESULT_DECL)
1913     {
1914       if (DECL_RTL_SET_P (decl))
1915 	instantiate_decl_rtl (DECL_RTL (decl));
1916       if (DECL_HAS_VALUE_EXPR_P (decl))
1917 	{
1918 	  tree v = DECL_VALUE_EXPR (decl);
1919 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1920 	}
1921     }
1922 
1923   /* Process the saved static chain if it exists.  */
1924   decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1925   if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1926     instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1927 
1928   /* Now process all variables defined in the function or its subblocks.  */
1929   if (DECL_INITIAL (fndecl))
1930     instantiate_decls_1 (DECL_INITIAL (fndecl));
1931 
1932   FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1933     if (DECL_RTL_SET_P (decl))
1934       instantiate_decl_rtl (DECL_RTL (decl));
1935   vec_free (cfun->local_decls);
1936 }
1937 
1938 /* Pass through the INSNS of function FNDECL and convert virtual register
1939    references to hard register references.  */
1940 
1941 static unsigned int
1942 instantiate_virtual_regs (void)
1943 {
1944   rtx_insn *insn;
1945 
1946   /* Compute the offsets to use for this function.  */
1947   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1948   var_offset = targetm.starting_frame_offset ();
1949   dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1950   out_arg_offset = STACK_POINTER_OFFSET;
1951 #ifdef FRAME_POINTER_CFA_OFFSET
1952   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1953 #else
1954   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1955 #endif
1956 
1957   /* Initialize recognition, indicating that volatile is OK.  */
1958   init_recog ();
1959 
1960   /* Scan through all the insns, instantiating every virtual register still
1961      present.  */
1962   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1963     if (INSN_P (insn))
1964       {
1965 	/* These patterns in the instruction stream can never be recognized.
1966 	   Fortunately, they shouldn't contain virtual registers either.  */
1967         if (GET_CODE (PATTERN (insn)) == USE
1968 	    || GET_CODE (PATTERN (insn)) == CLOBBER
1969 	    || GET_CODE (PATTERN (insn)) == ASM_INPUT
1970 	    || DEBUG_MARKER_INSN_P (insn))
1971 	  continue;
1972 	else if (DEBUG_BIND_INSN_P (insn))
1973 	  instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1974 	else
1975 	  instantiate_virtual_regs_in_insn (insn);
1976 
1977 	if (insn->deleted ())
1978 	  continue;
1979 
1980 	instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1981 
1982 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
1983 	if (CALL_P (insn))
1984 	  instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1985       }
1986 
1987   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
1988   instantiate_decls (current_function_decl);
1989 
1990   targetm.instantiate_decls ();
1991 
1992   /* Indicate that, from now on, assign_stack_local should use
1993      frame_pointer_rtx.  */
1994   virtuals_instantiated = 1;
1995 
1996   return 0;
1997 }
1998 
1999 namespace {
2000 
2001 const pass_data pass_data_instantiate_virtual_regs =
2002 {
2003   RTL_PASS, /* type */
2004   "vregs", /* name */
2005   OPTGROUP_NONE, /* optinfo_flags */
2006   TV_NONE, /* tv_id */
2007   0, /* properties_required */
2008   0, /* properties_provided */
2009   0, /* properties_destroyed */
2010   0, /* todo_flags_start */
2011   0, /* todo_flags_finish */
2012 };
2013 
2014 class pass_instantiate_virtual_regs : public rtl_opt_pass
2015 {
2016 public:
2017   pass_instantiate_virtual_regs (gcc::context *ctxt)
2018     : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2019   {}
2020 
2021   /* opt_pass methods: */
2022   virtual unsigned int execute (function *)
2023     {
2024       return instantiate_virtual_regs ();
2025     }
2026 
2027 }; // class pass_instantiate_virtual_regs
2028 
2029 } // anon namespace
2030 
2031 rtl_opt_pass *
2032 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2033 {
2034   return new pass_instantiate_virtual_regs (ctxt);
2035 }
2036 
2037 
2038 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2039    This means a type for which function calls must pass an address to the
2040    function or get an address back from the function.
2041    EXP may be a type node or an expression (whose type is tested).  */
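/* Illustrative example (target-dependent, not from the original sources):
   on a typical 64-bit target, a function returning
     struct big { char buf[64]; }
   yields a nonzero result here, so callers pass a hidden return-slot
   address, whereas a function returning a plain long yields zero.  */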
2042 
2043 int
2044 aggregate_value_p (const_tree exp, const_tree fntype)
2045 {
2046   const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2047   int i, regno, nregs;
2048   rtx reg;
2049 
2050   if (fntype)
2051     switch (TREE_CODE (fntype))
2052       {
2053       case CALL_EXPR:
2054 	{
2055 	  tree fndecl = get_callee_fndecl (fntype);
2056 	  if (fndecl)
2057 	    fntype = TREE_TYPE (fndecl);
2058 	  else if (CALL_EXPR_FN (fntype))
2059 	    fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2060 	  else
2061 	    /* For internal functions, assume nothing needs to be
2062 	       returned in memory.  */
2063 	    return 0;
2064 	}
2065 	break;
2066       case FUNCTION_DECL:
2067 	fntype = TREE_TYPE (fntype);
2068 	break;
2069       case FUNCTION_TYPE:
2070       case METHOD_TYPE:
2071         break;
2072       case IDENTIFIER_NODE:
2073 	fntype = NULL_TREE;
2074 	break;
2075       default:
2076 	/* We don't expect other tree types here.  */
2077 	gcc_unreachable ();
2078       }
2079 
2080   if (VOID_TYPE_P (type))
2081     return 0;
2082 
2083   /* If a record should be passed the same as its first (and only) member
2084      don't pass it as an aggregate.  */
2085   if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2086     return aggregate_value_p (first_field (type), fntype);
2087 
2088   /* If the front end has decided that this needs to be passed by
2089      reference, do so.  */
2090   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2091       && DECL_BY_REFERENCE (exp))
2092     return 1;
2093 
2094   /* Function types that are TREE_ADDRESSABLE force return in memory.  */
2095   if (fntype && TREE_ADDRESSABLE (fntype))
2096     return 1;
2097 
2098   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2099      and thus can't be returned in registers.  */
2100   if (TREE_ADDRESSABLE (type))
2101     return 1;
2102 
2103   if (TYPE_EMPTY_P (type))
2104     return 0;
2105 
2106   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2107     return 1;
2108 
2109   if (targetm.calls.return_in_memory (type, fntype))
2110     return 1;
2111 
2112   /* Make sure we have suitable call-clobbered regs to return
2113      the value in; if not, we must return it in memory.  */
2114   reg = hard_function_value (type, 0, fntype, 0);
2115 
2116   /* If we have something other than a REG (e.g. a PARALLEL), then assume
2117      it is OK.  */
2118   if (!REG_P (reg))
2119     return 0;
2120 
2121   regno = REGNO (reg);
2122   nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2123   for (i = 0; i < nregs; i++)
2124     if (! call_used_regs[regno + i])
2125       return 1;
2126 
2127   return 0;
2128 }
2129 
2130 /* Return true if we should assign DECL a pseudo register; false if it
2131    should live on the local stack.  */
2132 
2133 bool
2134 use_register_for_decl (const_tree decl)
2135 {
2136   if (TREE_CODE (decl) == SSA_NAME)
2137     {
2138       /* We often try to use the SSA_NAME, instead of its underlying
2139 	 decl, to get type information and guide decisions, to avoid
2140 	 differences of behavior between anonymous and named
2141 	 variables, but in this one case we have to go for the actual
2142 	 variable if there is one.  The main reason is that, at least
2143 	 at -O0, we want to place user variables on the stack, but we
2144 	 don't mind using pseudos for anonymous or ignored temps.
2145 	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2146 	 should go in pseudos, whereas their corresponding variables
2147 	 might have to go on the stack.  So, disregarding the decl
2148 	 here would negatively impact debug info at -O0, enable
2149 	 coalescing between SSA_NAMEs that ought to get different
2150 	 stack/pseudo assignments, and get the incoming argument
2151 	 processing thoroughly confused by PARM_DECLs expected to live
2152 	 in stack slots but assigned to pseudos.  */
2153       if (!SSA_NAME_VAR (decl))
2154 	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2155 	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2156 
2157       decl = SSA_NAME_VAR (decl);
2158     }
2159 
2160   /* Honor volatile.  */
2161   if (TREE_SIDE_EFFECTS (decl))
2162     return false;
2163 
2164   /* Honor addressability.  */
2165   if (TREE_ADDRESSABLE (decl))
2166     return false;
2167 
2168   /* RESULT_DECLs are a bit special in that they're assigned without
2169      regard to use_register_for_decl, but we generally only store in
2170      them.  If we coalesce their SSA NAMEs, we'd better return a
2171      result that matches the assignment in expand_function_start.  */
2172   if (TREE_CODE (decl) == RESULT_DECL)
2173     {
2174       /* If it's not an aggregate, we're going to use a REG or a
2175 	 PARALLEL containing a REG.  */
2176       if (!aggregate_value_p (decl, current_function_decl))
2177 	return true;
2178 
2179       /* If expand_function_start determines the return value, we'll
2180 	 use MEM if it's not by reference.  */
2181       if (cfun->returns_pcc_struct
2182 	  || (targetm.calls.struct_value_rtx
2183 	      (TREE_TYPE (current_function_decl), 1)))
2184 	return DECL_BY_REFERENCE (decl);
2185 
2186       /* Otherwise, we're taking an extra all.function_result_decl
2187 	 argument.  It's set up in assign_parms_augmented_arg_list,
2188 	 under the (negated) conditions above, and then it's used to
2189 	 set up the RESULT_DECL rtl in assign_parms, after looping
2190 	 over all parameters.  Now, if the RESULT_DECL is not by
2191 	 reference, we'll use a MEM either way.  */
2192       if (!DECL_BY_REFERENCE (decl))
2193 	return false;
2194 
2195       /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2196 	 the function_result_decl's assignment.  Since it's a pointer,
2197 	 we can short-circuit a number of the tests below, and we must
2198 	 duplicate them because we don't have the
2199 	 function_result_decl to test.  */
2200       if (!targetm.calls.allocate_stack_slots_for_args ())
2201 	return true;
2202       /* We don't set DECL_IGNORED_P for the function_result_decl.  */
2203       if (optimize)
2204 	return true;
2205       /* We don't set DECL_REGISTER for the function_result_decl.  */
2206       return false;
2207     }
2208 
2209   /* Only register-like things go in registers.  */
2210   if (DECL_MODE (decl) == BLKmode)
2211     return false;
2212 
2213   /* If -ffloat-store specified, don't put explicit float variables
2214      into registers.  */
2215   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2216      propagates values across these stores, and it probably shouldn't.  */
2217   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2218     return false;
2219 
2220   if (!targetm.calls.allocate_stack_slots_for_args ())
2221     return true;
2222 
2223   /* If we're not interested in tracking debugging information for
2224      this decl, then we can certainly put it in a register.  */
2225   if (DECL_IGNORED_P (decl))
2226     return true;
2227 
2228   if (optimize)
2229     return true;
2230 
2231   if (!DECL_REGISTER (decl))
2232     return false;
2233 
2234   /* When not optimizing, disregard register keyword for types that
2235      could have methods, otherwise the methods won't be callable from
2236      the debugger.  */
2237   if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2238     return false;
2239 
2240   return true;
2241 }
2242 
2243 /* Structures to communicate between the subroutines of assign_parms.
2244    The first holds data persistent across all parameters, the second
2245    is cleared out for each parameter.  */
2246 
2247 struct assign_parm_data_all
2248 {
2249   /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2250      should become a job of the target or otherwise encapsulated.  */
2251   CUMULATIVE_ARGS args_so_far_v;
2252   cumulative_args_t args_so_far;
2253   struct args_size stack_args_size;
2254   tree function_result_decl;
2255   tree orig_fnargs;
2256   rtx_insn *first_conversion_insn;
2257   rtx_insn *last_conversion_insn;
2258   HOST_WIDE_INT pretend_args_size;
2259   HOST_WIDE_INT extra_pretend_bytes;
2260   int reg_parm_stack_space;
2261 };
2262 
2263 struct assign_parm_data_one
2264 {
2265   tree nominal_type;
2266   tree passed_type;
2267   rtx entry_parm;
2268   rtx stack_parm;
2269   machine_mode nominal_mode;
2270   machine_mode passed_mode;
2271   machine_mode promoted_mode;
2272   struct locate_and_pad_arg_data locate;
2273   int partial;
2274   BOOL_BITFIELD named_arg : 1;
2275   BOOL_BITFIELD passed_pointer : 1;
2276   BOOL_BITFIELD on_stack : 1;
2277   BOOL_BITFIELD loaded_in_reg : 1;
2278 };
2279 
2280 /* A subroutine of assign_parms.  Initialize ALL.  */
2281 
2282 static void
2283 assign_parms_initialize_all (struct assign_parm_data_all *all)
2284 {
2285   tree fntype ATTRIBUTE_UNUSED;
2286 
2287   memset (all, 0, sizeof (*all));
2288 
2289   fntype = TREE_TYPE (current_function_decl);
2290 
2291 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2292   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2293 #else
2294   INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2295 			current_function_decl, -1);
2296 #endif
2297   all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2298 
2299 #ifdef INCOMING_REG_PARM_STACK_SPACE
2300   all->reg_parm_stack_space
2301     = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2302 #endif
2303 }
2304 
2305 /* If ARGS contains entries with complex types, split the entry into two
2306    entries of the component type.  ARGS is modified in place rather than
2307    a new list being returned.  */
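/* Illustrative example (not from the original sources): when the target's
   split_complex_arg hook accepts the type, a parameter declared as
     _Complex double z
   is rewritten here into two double PARM_DECLs, one for the real part and
   a second, synthetic one inserted right after it for the imaginary
   part.  */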
2308 
2309 static void
2310 split_complex_args (vec<tree> *args)
2311 {
2312   unsigned i;
2313   tree p;
2314 
2315   FOR_EACH_VEC_ELT (*args, i, p)
2316     {
2317       tree type = TREE_TYPE (p);
2318       if (TREE_CODE (type) == COMPLEX_TYPE
2319 	  && targetm.calls.split_complex_arg (type))
2320 	{
2321 	  tree decl;
2322 	  tree subtype = TREE_TYPE (type);
2323 	  bool addressable = TREE_ADDRESSABLE (p);
2324 
2325 	  /* Rewrite the PARM_DECL's type with its component.  */
2326 	  p = copy_node (p);
2327 	  TREE_TYPE (p) = subtype;
2328 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2329 	  SET_DECL_MODE (p, VOIDmode);
2330 	  DECL_SIZE (p) = NULL;
2331 	  DECL_SIZE_UNIT (p) = NULL;
2332 	  /* If this arg must go in memory, put it in a pseudo here.
2333 	     We can't allow it to go in memory as per normal parms,
2334 	     because the usual place might not have the imag part
2335 	     adjacent to the real part.  */
2336 	  DECL_ARTIFICIAL (p) = addressable;
2337 	  DECL_IGNORED_P (p) = addressable;
2338 	  TREE_ADDRESSABLE (p) = 0;
2339 	  layout_decl (p, 0);
2340 	  (*args)[i] = p;
2341 
2342 	  /* Build a second synthetic decl.  */
2343 	  decl = build_decl (EXPR_LOCATION (p),
2344 			     PARM_DECL, NULL_TREE, subtype);
2345 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2346 	  DECL_ARTIFICIAL (decl) = addressable;
2347 	  DECL_IGNORED_P (decl) = addressable;
2348 	  layout_decl (decl, 0);
2349 	  args->safe_insert (++i, decl);
2350 	}
2351     }
2352 }
2353 
2354 /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
2355    the hidden struct return argument, and (abi willing) complex args.
2356    Return the new parameter list.  */
2357 
2358 static vec<tree>
2359 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2360 {
2361   tree fndecl = current_function_decl;
2362   tree fntype = TREE_TYPE (fndecl);
2363   vec<tree> fnargs = vNULL;
2364   tree arg;
2365 
2366   for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2367     fnargs.safe_push (arg);
2368 
2369   all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2370 
2371   /* If struct value address is treated as the first argument, make it so.  */
2372   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2373       && ! cfun->returns_pcc_struct
2374       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2375     {
2376       tree type = build_pointer_type (TREE_TYPE (fntype));
2377       tree decl;
2378 
2379       decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2380 			 PARM_DECL, get_identifier (".result_ptr"), type);
2381       DECL_ARG_TYPE (decl) = type;
2382       DECL_ARTIFICIAL (decl) = 1;
2383       DECL_NAMELESS (decl) = 1;
2384       TREE_CONSTANT (decl) = 1;
2385       /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
2386 	 changes, the end of the RESULT_DECL handling block in
2387 	 use_register_for_decl must be adjusted to match.  */
2388 
2389       DECL_CHAIN (decl) = all->orig_fnargs;
2390       all->orig_fnargs = decl;
2391       fnargs.safe_insert (0, decl);
2392 
2393       all->function_result_decl = decl;
2394     }
2395 
2396   /* If the target wants to split complex arguments into scalars, do so.  */
2397   if (targetm.calls.split_complex_arg)
2398     split_complex_args (&fnargs);
2399 
2400   return fnargs;
2401 }
2402 
2403 /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2404    data for the parameter.  Incorporate ABI specifics such as pass-by-
2405    reference and type promotion.  */
2406 
2407 static void
2408 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2409 			     struct assign_parm_data_one *data)
2410 {
2411   tree nominal_type, passed_type;
2412   machine_mode nominal_mode, passed_mode, promoted_mode;
2413   int unsignedp;
2414 
2415   memset (data, 0, sizeof (*data));
2416 
2417   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
2418   if (!cfun->stdarg)
2419     data->named_arg = 1;  /* No variadic parms.  */
2420   else if (DECL_CHAIN (parm))
2421     data->named_arg = 1;  /* Not the last non-variadic parm. */
2422   else if (targetm.calls.strict_argument_naming (all->args_so_far))
2423     data->named_arg = 1;  /* Only variadic ones are unnamed.  */
2424   else
2425     data->named_arg = 0;  /* Treat as variadic.  */
2426 
2427   nominal_type = TREE_TYPE (parm);
2428   passed_type = DECL_ARG_TYPE (parm);
2429 
2430   /* Look out for errors propagating this far.  Also, if the parameter's
2431      type is void then its value doesn't matter.  */
2432   if (TREE_TYPE (parm) == error_mark_node
2433       /* This can happen after weird syntax errors
2434 	 or if an enum type is defined among the parms.  */
2435       || TREE_CODE (parm) != PARM_DECL
2436       || passed_type == NULL
2437       || VOID_TYPE_P (nominal_type))
2438     {
2439       nominal_type = passed_type = void_type_node;
2440       nominal_mode = passed_mode = promoted_mode = VOIDmode;
2441       goto egress;
2442     }
2443 
2444   /* Find mode of arg as it is passed, and mode of arg as it should be
2445      during execution of this function.  */
2446   passed_mode = TYPE_MODE (passed_type);
2447   nominal_mode = TYPE_MODE (nominal_type);
2448 
2449   /* If the parm is to be passed as a transparent union or record, use the
2450      type of the first field for the tests below.  We have already verified
2451      that the modes are the same.  */
2452   if ((TREE_CODE (passed_type) == UNION_TYPE
2453        || TREE_CODE (passed_type) == RECORD_TYPE)
2454       && TYPE_TRANSPARENT_AGGR (passed_type))
2455     passed_type = TREE_TYPE (first_field (passed_type));
2456 
2457   /* See if this arg was passed by invisible reference.  */
2458   if (pass_by_reference (&all->args_so_far_v, passed_mode,
2459 			 passed_type, data->named_arg))
2460     {
2461       passed_type = nominal_type = build_pointer_type (passed_type);
2462       data->passed_pointer = true;
2463       passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2464     }
2465 
2466   /* Find mode as it is passed by the ABI.  */
2467   unsignedp = TYPE_UNSIGNED (passed_type);
2468   promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2469 				         TREE_TYPE (current_function_decl), 0);
2470 
2471  egress:
2472   data->nominal_type = nominal_type;
2473   data->passed_type = passed_type;
2474   data->nominal_mode = nominal_mode;
2475   data->passed_mode = passed_mode;
2476   data->promoted_mode = promoted_mode;
2477 }
2478 
2479 /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2480 
2481 static void
2482 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2483 			    struct assign_parm_data_one *data, bool no_rtl)
2484 {
2485   int varargs_pretend_bytes = 0;
2486 
2487   targetm.calls.setup_incoming_varargs (all->args_so_far,
2488 					data->promoted_mode,
2489 					data->passed_type,
2490 					&varargs_pretend_bytes, no_rtl);
2491 
2492   /* If the back-end has requested extra stack space, record how much is
2493      needed.  Do not change pretend_args_size otherwise since it may be
2494      nonzero from an earlier partial argument.  */
2495   if (varargs_pretend_bytes > 0)
2496     all->pretend_args_size = varargs_pretend_bytes;
2497 }
2498 
2499 /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2500    the incoming location of the current parameter.  */
2501 
2502 static void
2503 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2504 			    struct assign_parm_data_one *data)
2505 {
2506   HOST_WIDE_INT pretend_bytes = 0;
2507   rtx entry_parm;
2508   bool in_regs;
2509 
2510   if (data->promoted_mode == VOIDmode)
2511     {
2512       data->entry_parm = data->stack_parm = const0_rtx;
2513       return;
2514     }
2515 
2516   targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2517 					    data->passed_type);
2518 
2519   entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2520 						    data->promoted_mode,
2521 						    data->passed_type,
2522 						    data->named_arg);
2523 
2524   if (entry_parm == 0)
2525     data->promoted_mode = data->passed_mode;
2526 
2527   /* Determine parm's home in the stack, in case it arrives in the stack
2528      or we should pretend it did.  Compute the stack position and rtx where
2529      the argument arrives and its size.
2530 
2531      There is one complexity here:  If this was a parameter that would
2532      have been passed in registers, but wasn't only because it is
2533      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2534      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2535      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2536      as it was the previous time.  */
2537   in_regs = (entry_parm != 0);
2538 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2539   in_regs = true;
2540 #endif
2541   if (!in_regs && !data->named_arg)
2542     {
2543       if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2544 	{
2545 	  rtx tem;
2546 	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
2547 						     data->promoted_mode,
2548 						     data->passed_type, true);
2549 	  in_regs = tem != NULL;
2550 	}
2551     }
2552 
2553   /* If this parameter was passed both in registers and in the stack, use
2554      the copy on the stack.  */
2555   if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2556 					data->passed_type))
2557     entry_parm = 0;
2558 
2559   if (entry_parm)
2560     {
2561       int partial;
2562 
2563       partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2564 						 data->promoted_mode,
2565 						 data->passed_type,
2566 						 data->named_arg);
2567       data->partial = partial;
2568 
2569       /* The caller might already have allocated stack space for the
2570 	 register parameters.  */
2571       if (partial != 0 && all->reg_parm_stack_space == 0)
2572 	{
2573 	  /* Part of this argument is passed in registers and part
2574 	     is passed on the stack.  Ask the prologue code to extend
2575 	     the stack part so that we can recreate the full value.
2576 
2577 	     PRETEND_BYTES is the size of the registers we need to store.
2578 	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2579 	     stack space that the prologue should allocate.
2580 
2581 	     Internally, gcc assumes that the argument pointer is aligned
2582 	     to STACK_BOUNDARY bits.  This is used both for alignment
2583 	     optimizations (see init_emit) and to locate arguments that are
2584 	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
2585 	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2586 	     a stack boundary.  */
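
	  /* Worked example (illustrative, values assumed): if the first
	     stack argument has 12 bytes passed in registers (partial == 12)
	     and STACK_BYTES is 16, pretend_args_size becomes
	     CEIL_ROUND (12, 16) == 16, so the prologue reserves one extra
	     aligned slot into which those register bytes can be stored.  */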
2587 
2588 	  /* We assume at most one partial arg, and it must be the first
2589 	     argument on the stack.  */
2590 	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2591 
2592 	  pretend_bytes = partial;
2593 	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2594 
2595 	  /* We want to align relative to the actual stack pointer, so
2596 	     don't include this in the stack size until later.  */
2597 	  all->extra_pretend_bytes = all->pretend_args_size;
2598 	}
2599     }
2600 
2601   locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2602 		       all->reg_parm_stack_space,
2603 		       entry_parm ? data->partial : 0, current_function_decl,
2604 		       &all->stack_args_size, &data->locate);
2605 
2606   /* Update parm_stack_boundary if this parameter is passed in the
2607      stack.  */
2608   if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2609     crtl->parm_stack_boundary = data->locate.boundary;
2610 
2611   /* Adjust offsets to include the pretend args.  */
2612   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2613   data->locate.slot_offset.constant += pretend_bytes;
2614   data->locate.offset.constant += pretend_bytes;
2615 
2616   data->entry_parm = entry_parm;
2617 }
2618 
2619 /* A subroutine of assign_parms.  If there is actually space on the stack
2620    for this parm, count it in stack_args_size and return true.  */
2621 
2622 static bool
2623 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2624 			   struct assign_parm_data_one *data)
2625 {
2626   /* Trivially true if we've no incoming register.  */
2627   if (data->entry_parm == NULL)
2628     ;
2629   /* Also true if we're partially in registers and partially not,
2630      since we've arranged to drop the entire argument on the stack.  */
2631   else if (data->partial != 0)
2632     ;
2633   /* Also true if the target says that it's passed in both registers
2634      and on the stack.  */
2635   else if (GET_CODE (data->entry_parm) == PARALLEL
2636 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2637     ;
2638   /* Also true if the target says that there's stack allocated for
2639      all register parameters.  */
2640   else if (all->reg_parm_stack_space > 0)
2641     ;
2642   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2643   else
2644     return false;
2645 
2646   all->stack_args_size.constant += data->locate.size.constant;
2647   if (data->locate.size.var)
2648     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2649 
2650   return true;
2651 }
2652 
2653 /* A subroutine of assign_parms.  Given that this parameter is allocated
2654    stack space by the ABI, find it.  */
2655 
2656 static void
2657 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2658 {
2659   rtx offset_rtx, stack_parm;
2660   unsigned int align, boundary;
2661 
2662   /* If we're passing this arg using a reg, make its stack home the
2663      aligned stack slot.  */
2664   if (data->entry_parm)
2665     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2666   else
2667     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2668 
2669   stack_parm = crtl->args.internal_arg_pointer;
2670   if (offset_rtx != const0_rtx)
2671     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2672   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2673 
2674   if (!data->passed_pointer)
2675     {
2676       set_mem_attributes (stack_parm, parm, 1);
2677       /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2678 	 while promoted mode's size is needed.  */
2679       if (data->promoted_mode != BLKmode
2680 	  && data->promoted_mode != DECL_MODE (parm))
2681 	{
2682 	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2683 	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2684 	    {
2685 	      poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2686 							 data->promoted_mode);
2687 	      if (maybe_ne (offset, 0))
2688 		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2689 	    }
2690 	}
2691     }
2692 
2693   boundary = data->locate.boundary;
2694   align = BITS_PER_UNIT;
2695 
2696   /* If we're padding upward, we know that the alignment of the slot
2697      is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2698      intentionally forcing upward padding.  Otherwise we have to come
2699      up with a guess at the alignment based on OFFSET_RTX.  */
2700   poly_int64 offset;
2701   if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm)
2702     align = boundary;
2703   else if (poly_int_rtx_p (offset_rtx, &offset))
2704     {
2705       align = least_bit_hwi (boundary);
2706       unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2707       if (offset_align != 0)
2708 	align = MIN (align, offset_align);
2709     }
2710   set_mem_align (stack_parm, align);
2711 
2712   if (data->entry_parm)
2713     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2714 
2715   data->stack_parm = stack_parm;
2716 }
2717 
2718 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
2719    always valid and contiguous.  */
2720 
2721 static void
2722 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2723 {
2724   rtx entry_parm = data->entry_parm;
2725   rtx stack_parm = data->stack_parm;
2726 
2727   /* If this parm was passed part in regs and part in memory, pretend it
2728      arrived entirely in memory by pushing the register-part onto the stack.
2729      In the special case of a DImode or DFmode that is split, we could put
2730      it together in a pseudoreg directly, but for now that's not worth
2731      bothering with.  */
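  /* Illustrative example (ABI details assumed, not from the original
     sources): on a 64-bit target where the last argument register holds
     only the first 8 bytes of a 24-byte struct, data->partial is 8; that
     one word is stored into the struct's stack home below so the whole
     argument ends up contiguous in memory.  */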
2732   if (data->partial != 0)
2733     {
2734       /* Handle calls that pass values in multiple non-contiguous
2735 	 locations.  The Irix 6 ABI has examples of this.  */
2736       if (GET_CODE (entry_parm) == PARALLEL)
2737 	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2738 			  data->passed_type,
2739 			  int_size_in_bytes (data->passed_type));
2740       else
2741 	{
2742 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
2743 	  move_block_from_reg (REGNO (entry_parm),
2744 			       validize_mem (copy_rtx (stack_parm)),
2745 			       data->partial / UNITS_PER_WORD);
2746 	}
2747 
2748       entry_parm = stack_parm;
2749     }
2750 
2751   /* If we didn't decide this parm came in a register, by default it came
2752      on the stack.  */
2753   else if (entry_parm == NULL)
2754     entry_parm = stack_parm;
2755 
2756   /* When an argument is passed in multiple locations, we can't make use
2757      of this information, but we can save some copying if the whole argument
2758      is passed in a single register.  */
2759   else if (GET_CODE (entry_parm) == PARALLEL
2760 	   && data->nominal_mode != BLKmode
2761 	   && data->passed_mode != BLKmode)
2762     {
2763       size_t i, len = XVECLEN (entry_parm, 0);
2764 
2765       for (i = 0; i < len; i++)
2766 	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2767 	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2768 	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2769 		== data->passed_mode)
2770 	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2771 	  {
2772 	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2773 	    break;
2774 	  }
2775     }
2776 
2777   data->entry_parm = entry_parm;
2778 }
2779 
2780 /* A subroutine of assign_parms.  Reconstitute any values which were
2781    passed in multiple registers and would fit in a single register.  */
2782 
2783 static void
2784 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2785 {
2786   rtx entry_parm = data->entry_parm;
2787 
2788   /* Convert the PARALLEL to a REG of the same mode as the parallel.
2789      This can be done with register operations rather than on the
2790      stack, even if we will store the reconstituted parameter on the
2791      stack later.  */
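  /* Illustrative sketch (not from the original sources): a TImode argument
     arriving as a (parallel) of two DImode hard registers at offsets 0 and
     8 is reassembled below into one fresh TImode pseudo via
     emit_group_store, even if it is later spilled back to the stack.  */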
2792   if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2793     {
2794       rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2795       emit_group_store (parmreg, entry_parm, data->passed_type,
2796 			GET_MODE_SIZE (GET_MODE (entry_parm)));
2797       entry_parm = parmreg;
2798     }
2799 
2800   data->entry_parm = entry_parm;
2801 }
2802 
2803 /* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
2804    always valid and properly aligned.  */
2805 
2806 static void
2807 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2808 {
2809   rtx stack_parm = data->stack_parm;
2810 
2811   /* If we can't trust the parm stack slot to be aligned enough for its
2812      ultimate type, don't use that slot after entry.  We'll make another
2813      stack slot, if we need one.  */
2814   if (stack_parm
2815       && ((STRICT_ALIGNMENT
2816 	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2817 	  || (data->nominal_type
2818 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2819 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2820     stack_parm = NULL;
2821 
2822   /* If parm was passed in memory, and we need to convert it on entry,
2823      don't store it back in that same slot.  */
2824   else if (data->entry_parm == stack_parm
2825 	   && data->nominal_mode != BLKmode
2826 	   && data->nominal_mode != data->passed_mode)
2827     stack_parm = NULL;
2828 
2829   /* If stack protection is in effect for this function, don't leave any
2830      pointers in their passed stack slots.  */
2831   else if (crtl->stack_protect_guard
2832 	   && (flag_stack_protect == 2
2833 	       || data->passed_pointer
2834 	       || POINTER_TYPE_P (data->nominal_type)))
2835     stack_parm = NULL;
2836 
2837   data->stack_parm = stack_parm;
2838 }
2839 
2840 /* A subroutine of assign_parms.  Return true if the current parameter
2841    should be stored as a BLKmode in the current frame.  */
2842 
2843 static bool
2844 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2845 {
2846   if (data->nominal_mode == BLKmode)
2847     return true;
2848   if (GET_MODE (data->entry_parm) == BLKmode)
2849     return true;
2850 
2851 #ifdef BLOCK_REG_PADDING
2852   /* Only assign_parm_setup_block knows how to deal with register arguments
2853      that are padded at the least significant end.  */
2854   if (REG_P (data->entry_parm)
2855       && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD)
2856       && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2857 	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2858     return true;
2859 #endif
2860 
2861   return false;
2862 }
2863 
2864 /* A subroutine of assign_parms.  Arrange for the parameter to be
2865    present and valid in DATA->STACK_RTL.  */
2866 
2867 static void
2868 assign_parm_setup_block (struct assign_parm_data_all *all,
2869 			 tree parm, struct assign_parm_data_one *data)
2870 {
2871   rtx entry_parm = data->entry_parm;
2872   rtx stack_parm = data->stack_parm;
2873   rtx target_reg = NULL_RTX;
2874   bool in_conversion_seq = false;
2875   HOST_WIDE_INT size;
2876   HOST_WIDE_INT size_stored;
2877 
2878   if (GET_CODE (entry_parm) == PARALLEL)
2879     entry_parm = emit_group_move_into_temps (entry_parm);
2880 
2881   /* If we want the parameter in a pseudo, don't use a stack slot.  */
2882   if (is_gimple_reg (parm) && use_register_for_decl (parm))
2883     {
2884       tree def = ssa_default_def (cfun, parm);
2885       gcc_assert (def);
2886       machine_mode mode = promote_ssa_mode (def, NULL);
2887       rtx reg = gen_reg_rtx (mode);
2888       if (GET_CODE (reg) != CONCAT)
2889 	stack_parm = reg;
2890       else
2891 	{
2892 	  target_reg = reg;
2893 	  /* Avoid allocating a stack slot, if there isn't one
2894 	     preallocated by the ABI.  It might seem like we should
2895 	     always prefer a pseudo, but converting between
2896 	     floating-point and integer modes goes through the stack
2897 	     on various machines, so it's better to use the reserved
2898 	     stack slot than to risk wasting it and allocating more
2899 	     for the conversion.  */
2900 	  if (stack_parm == NULL_RTX)
2901 	    {
2902 	      int save = generating_concat_p;
2903 	      generating_concat_p = 0;
2904 	      stack_parm = gen_reg_rtx (mode);
2905 	      generating_concat_p = save;
2906 	    }
2907 	}
2908       data->stack_parm = NULL;
2909     }
2910 
2911   size = int_size_in_bytes (data->passed_type);
2912   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2913   if (stack_parm == 0)
2914     {
2915       SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
2916       if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
2917 	{
2918 	  rtx allocsize = gen_int_mode (size_stored, Pmode);
2919 	  get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
2920 	  stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
2921 					   MAX_SUPPORTED_STACK_ALIGNMENT);
2922 	  rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
2923 					    DECL_ALIGN (parm));
2924 	  mark_reg_pointer (addr, DECL_ALIGN (parm));
2925 	  stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
2926 	  MEM_NOTRAP_P (stack_parm) = 1;
2927 	}
2928       else
2929 	stack_parm = assign_stack_local (BLKmode, size_stored,
2930 					 DECL_ALIGN (parm));
2931       if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2932 	PUT_MODE (stack_parm, GET_MODE (entry_parm));
2933       set_mem_attributes (stack_parm, parm, 1);
2934     }
2935 
2936   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
2937      calls that pass values in multiple non-contiguous locations.  */
2938   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2939     {
2940       rtx mem;
2941 
2942       /* Note that we will be storing an integral number of words.
2943 	 So we have to be careful to ensure that we allocate an
2944 	 integral number of words.  We do this above when we call
2945 	 assign_stack_local if space was not allocated in the argument
2946 	 list.  If it was, this will not work if PARM_BOUNDARY is not
2947 	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
2948 	 if it becomes a problem.  Exception is when BLKmode arrives
2949 	 with arguments not conforming to word_mode.  */
2950 
2951       if (data->stack_parm == 0)
2952 	;
2953       else if (GET_CODE (entry_parm) == PARALLEL)
2954 	;
2955       else
2956 	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2957 
2958       mem = validize_mem (copy_rtx (stack_parm));
2959 
2960       /* Handle values in multiple non-contiguous locations.  */
2961       if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2962 	emit_group_store (mem, entry_parm, data->passed_type, size);
2963       else if (GET_CODE (entry_parm) == PARALLEL)
2964 	{
2965 	  push_to_sequence2 (all->first_conversion_insn,
2966 			     all->last_conversion_insn);
2967 	  emit_group_store (mem, entry_parm, data->passed_type, size);
2968 	  all->first_conversion_insn = get_insns ();
2969 	  all->last_conversion_insn = get_last_insn ();
2970 	  end_sequence ();
2971 	  in_conversion_seq = true;
2972 	}
2973 
2974       else if (size == 0)
2975 	;
2976 
2977       /* If SIZE is that of a mode no bigger than a word, just use
2978 	 that mode's store operation.  */
2979       else if (size <= UNITS_PER_WORD)
2980 	{
2981 	  unsigned int bits = size * BITS_PER_UNIT;
2982 	  machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
2983 
2984 	  if (mode != BLKmode
2985 #ifdef BLOCK_REG_PADDING
2986 	      && (size == UNITS_PER_WORD
2987 		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2988 		      != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2989 #endif
2990 	      )
2991 	    {
2992 	      rtx reg;
2993 
2994 	      /* We are really truncating a word_mode value containing
2995 		 SIZE bytes into a value of mode MODE.  If such an
2996 		 operation requires no actual instructions, we can refer
2997 		 to the value directly in mode MODE, otherwise we must
2998 		 start with the register in word_mode and explicitly
2999 		 convert it.  */
3000 	      if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
3001 						 BITS_PER_WORD))
3002 		reg = gen_rtx_REG (mode, REGNO (entry_parm));
3003 	      else
3004 		{
3005 		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3006 		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3007 		}
3008 	      emit_move_insn (change_address (mem, mode, 0), reg);
3009 	    }
3010 
3011 #ifdef BLOCK_REG_PADDING
3012 	  /* Storing the register in memory as a full word, as
3013 	     move_block_from_reg below would do, and then using the
3014 	     MEM in a smaller mode, has the effect of shifting right
3015 	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
3016 	     shifting must be explicit.  */
3017 	  else if (!MEM_P (mem))
3018 	    {
3019 	      rtx x;
3020 
3021 	      /* If the assert below fails, we should have taken the
3022 		 mode != BLKmode path above, unless we have downward
3023 		 padding of smaller-than-word arguments on a machine
3024 		 with little-endian bytes, which would likely require
3025 		 additional changes to work correctly.  */
3026 	      gcc_checking_assert (BYTES_BIG_ENDIAN
3027 				   && (BLOCK_REG_PADDING (mode,
3028 							  data->passed_type, 1)
3029 				       == PAD_UPWARD));
3030 
3031 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3032 
3033 	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3034 	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3035 				NULL_RTX, 1);
3036 	      x = force_reg (word_mode, x);
3037 	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3038 
3039 	      emit_move_insn (mem, x);
3040 	    }
3041 #endif
3042 
3043 	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3044 	     machine must be aligned to the left before storing
3045 	     to memory.  Note that the previous test doesn't
3046 	     handle all cases (e.g. SIZE == 3).  */
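	  /* Worked example (illustrative): with UNITS_PER_WORD == 4 and a
	     3-byte argument, the value is shifted left by
	     (4 - 3) * BITS_PER_UNIT == 8 bits so that its bytes occupy the
	     low addresses of the word when it is stored big-endian.  */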
3047 	  else if (size != UNITS_PER_WORD
3048 #ifdef BLOCK_REG_PADDING
3049 		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3050 		       == PAD_DOWNWARD)
3051 #else
3052 		   && BYTES_BIG_ENDIAN
3053 #endif
3054 		   )
3055 	    {
3056 	      rtx tem, x;
3057 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3058 	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3059 
3060 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3061 	      tem = change_address (mem, word_mode, 0);
3062 	      emit_move_insn (tem, x);
3063 	    }
3064 	  else
3065 	    move_block_from_reg (REGNO (entry_parm), mem,
3066 				 size_stored / UNITS_PER_WORD);
3067 	}
3068       else if (!MEM_P (mem))
3069 	{
3070 	  gcc_checking_assert (size > UNITS_PER_WORD);
3071 #ifdef BLOCK_REG_PADDING
3072 	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3073 						  data->passed_type, 0)
3074 			       == PAD_UPWARD);
3075 #endif
3076 	  emit_move_insn (mem, entry_parm);
3077 	}
3078       else
3079 	move_block_from_reg (REGNO (entry_parm), mem,
3080 			     size_stored / UNITS_PER_WORD);
3081     }
3082   else if (data->stack_parm == 0)
3083     {
3084       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3085       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3086 		       BLOCK_OP_NORMAL);
3087       all->first_conversion_insn = get_insns ();
3088       all->last_conversion_insn = get_last_insn ();
3089       end_sequence ();
3090       in_conversion_seq = true;
3091     }
3092 
3093   if (target_reg)
3094     {
3095       if (!in_conversion_seq)
3096 	emit_move_insn (target_reg, stack_parm);
3097       else
3098 	{
3099 	  push_to_sequence2 (all->first_conversion_insn,
3100 			     all->last_conversion_insn);
3101 	  emit_move_insn (target_reg, stack_parm);
3102 	  all->first_conversion_insn = get_insns ();
3103 	  all->last_conversion_insn = get_last_insn ();
3104 	  end_sequence ();
3105 	}
3106       stack_parm = target_reg;
3107     }
3108 
3109   data->stack_parm = stack_parm;
3110   set_parm_rtl (parm, stack_parm);
3111 }
3112 
3113 /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
3114    parameter.  Get it there.  Perform all ABI specified conversions.  */
3115 
3116 static void
3117 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3118 		       struct assign_parm_data_one *data)
3119 {
3120   rtx parmreg, validated_mem;
3121   rtx equiv_stack_parm;
3122   machine_mode promoted_nominal_mode;
3123   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3124   bool did_conversion = false;
3125   bool need_conversion, moved;
3126   rtx rtl;
3127 
3128   /* Store the parm in a pseudoregister during the function, but we may
3129      need to do it in a wider mode.  Using 2 here makes the result
3130      consistent with promote_decl_mode and thus expand_expr_real_1.  */
3131   promoted_nominal_mode
3132     = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3133 			     TREE_TYPE (current_function_decl), 2);
3134 
3135   parmreg = gen_reg_rtx (promoted_nominal_mode);
3136   if (!DECL_ARTIFICIAL (parm))
3137     mark_user_reg (parmreg);
3138 
3139   /* If this was an item that we received a pointer to,
3140      set rtl appropriately.  */
3141   if (data->passed_pointer)
3142     {
3143       rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3144       set_mem_attributes (rtl, parm, 1);
3145     }
3146   else
3147     rtl = parmreg;
3148 
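  /* If ENTRY_PARM arrived as a PARALLEL split across several registers,
     reconstitute it into a single object before going any further.  */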
3149   assign_parm_remove_parallels (data);
3150 
3151   /* Copy the value into the register, thus bridging between
3152      assign_parm_find_data_types and expand_expr_real_1.  */
3153 
3154   equiv_stack_parm = data->stack_parm;
3155   validated_mem = validize_mem (copy_rtx (data->entry_parm));
3156 
3157   need_conversion = (data->nominal_mode != data->passed_mode
3158 		     || promoted_nominal_mode != data->promoted_mode);
3159   moved = false;
3160 
3161   if (need_conversion
3162       && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3163       && data->nominal_mode == data->passed_mode
3164       && data->nominal_mode == GET_MODE (data->entry_parm))
3165     {
3166       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3167 	 mode, by the caller.  We now have to convert it to
3168 	 NOMINAL_MODE, if different.  However, PARMREG may be in
3169 	 a different mode than NOMINAL_MODE if it is being stored
3170 	 promoted.
3171 
3172 	 If ENTRY_PARM is a hard register, it might be in a register
3173 	 not valid for operating in its mode (e.g., an odd-numbered
3174 	 register for a DFmode).  In that case, moves are the only
3175 	 thing valid, so we can't do a convert from there.  This
3176 	 occurs when the calling sequence allows such misaligned
3177 	 usages.
3178 
3179 	 In addition, the conversion may involve a call, which could
3180 	 clobber parameters which haven't been copied to pseudo
3181 	 registers yet.
3182 
3183 	 First, we try to emit an insn which performs the necessary
3184 	 conversion.  We verify that this insn does not clobber any
3185 	 hard registers.  */
3186 
3187       enum insn_code icode;
3188       rtx op0, op1;
3189 
3190       icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3191 			    unsignedp);
3192 
3193       op0 = parmreg;
3194       op1 = validated_mem;
3195       if (icode != CODE_FOR_nothing
3196 	  && insn_operand_matches (icode, 0, op0)
3197 	  && insn_operand_matches (icode, 1, op1))
3198 	{
3199 	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3200 	  rtx_insn *insn, *insns;
3201 	  rtx t = op1;
3202 	  HARD_REG_SET hardregs;
3203 
3204 	  start_sequence ();
3205 	  /* If op1 is a hard register that is likely spilled, first
3206 	     force it into a pseudo; otherwise the combiner might extend
3207 	     its lifetime too much.  */
3208 	  if (GET_CODE (t) == SUBREG)
3209 	    t = SUBREG_REG (t);
3210 	  if (REG_P (t)
3211 	      && HARD_REGISTER_P (t)
3212 	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3213 	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3214 	    {
3215 	      t = gen_reg_rtx (GET_MODE (op1));
3216 	      emit_move_insn (t, op1);
3217 	    }
3218 	  else
3219 	    t = op1;
3220 	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3221 					   data->passed_mode, unsignedp);
3222 	  emit_insn (pat);
3223 	  insns = get_insns ();
3224 
3225 	  moved = true;
3226 	  CLEAR_HARD_REG_SET (hardregs);
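	  /* Scan the sequence just generated: if any insn in it sets a
	     hard register, it could clobber a parm still living in its
	     incoming hard register, so the direct conversion is unsafe
	     and we fall back to the deferred path below.  */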
3227 	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3228 	    {
3229 	      if (INSN_P (insn))
3230 		note_stores (PATTERN (insn), record_hard_reg_sets,
3231 			     &hardregs);
3232 	      if (!hard_reg_set_empty_p (hardregs))
3233 		moved = false;
3234 	    }
3235 
3236 	  end_sequence ();
3237 
3238 	  if (moved)
3239 	    {
3240 	      emit_insn (insns);
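	      /* The REG_EQUIV note recorded later must describe the value
		 in PARMREG's promoted mode, so wrap the stack slot in the
		 same extension.  */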
3241 	      if (equiv_stack_parm != NULL_RTX)
3242 		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3243 						  equiv_stack_parm);
3244 	    }
3245 	}
3246     }
3247 
3248   if (moved)
3249     /* Nothing to do.  */
3250     ;
3251   else if (need_conversion)
3252     {
3253       /* We did not have an insn to convert directly, or the sequence
3254 	 generated appeared unsafe.  We must first copy the parm to a
3255 	 pseudo reg, and save the conversion until after all
3256 	 parameters have been moved.  */
3257 
3258       int save_tree_used;
3259       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3260 
3261       emit_move_insn (tempreg, validated_mem);
3262 
3263       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3264       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3265 
3266       if (partial_subreg_p (tempreg)
3267 	  && GET_MODE (tempreg) == data->nominal_mode
3268 	  && REG_P (SUBREG_REG (tempreg))
3269 	  && data->nominal_mode == data->passed_mode
3270 	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3271 	{
3272 	  /* The argument is already sign/zero extended, so note it
3273 	     into the subreg.  */
3274 	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3275 	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
3276 	}
3277 
3278       /* TREE_USED gets set erroneously during expand_assignment.  */
3279       save_tree_used = TREE_USED (parm);
3280       SET_DECL_RTL (parm, rtl);
3281       expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3282       SET_DECL_RTL (parm, NULL_RTX);
3283       TREE_USED (parm) = save_tree_used;
3284       all->first_conversion_insn = get_insns ();
3285       all->last_conversion_insn = get_last_insn ();
3286       end_sequence ();
3287 
3288       did_conversion = true;
3289     }
3290   else
3291     emit_move_insn (parmreg, validated_mem);
3292 
3293   /* If we were passed a pointer but the actual value can safely live
3294      in a register, retrieve it and use it directly.  */
3295   if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3296     {
3297       /* We can't use nominal_mode, because it will have been set to
3298 	 Pmode above.  We must use the actual mode of the parm.  */
3299       if (use_register_for_decl (parm))
3300 	{
3301 	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3302 	  mark_user_reg (parmreg);
3303 	}
3304       else
3305 	{
3306 	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3307 					    TYPE_MODE (TREE_TYPE (parm)),
3308 					    TYPE_ALIGN (TREE_TYPE (parm)));
3309 	  parmreg
3310 	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3311 				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3312 				  align);
3313 	  set_mem_attributes (parmreg, parm, 1);
3314 	}
3315 
3316       /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3317 	 the debug info in case it is not legitimate.  */
3318       if (GET_MODE (parmreg) != GET_MODE (rtl))
3319 	{
3320 	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3321 	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3322 
3323 	  push_to_sequence2 (all->first_conversion_insn,
3324 			     all->last_conversion_insn);
3325 	  emit_move_insn (tempreg, rtl);
3326 	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3327 	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3328 			  tempreg);
3329 	  all->first_conversion_insn = get_insns ();
3330 	  all->last_conversion_insn = get_last_insn ();
3331 	  end_sequence ();
3332 
3333 	  did_conversion = true;
3334 	}
3335       else
3336 	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3337 
3338       rtl = parmreg;
3339 
3340       /* STACK_PARM is the pointer, not the parm, and PARMREG is
3341 	 now the parm.  */
3342       data->stack_parm = NULL;
3343     }
3344 
3345   set_parm_rtl (parm, rtl);
3346 
3347   /* Mark the register as eliminable if we did no conversion and it was
3348      copied from memory at a fixed offset, and the arg pointer was not
3349      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
3350      offset formed an invalid address, such memory-equivalences as we
3351      make here would screw up life analysis for it.  */
3352   if (data->nominal_mode == data->passed_mode
3353       && !did_conversion
3354       && data->stack_parm != 0
3355       && MEM_P (data->stack_parm)
3356       && data->locate.offset.var == 0
3357       && reg_mentioned_p (virtual_incoming_args_rtx,
3358 			  XEXP (data->stack_parm, 0)))
3359     {
3360       rtx_insn *linsn = get_last_insn ();
3361       rtx_insn *sinsn;
3362       rtx set;
3363 
3364       /* Mark complex types separately.  */
3365       if (GET_CODE (parmreg) == CONCAT)
3366 	{
3367 	  scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3368 	  int regnor = REGNO (XEXP (parmreg, 0));
3369 	  int regnoi = REGNO (XEXP (parmreg, 1));
3370 	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3371 	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
3372 					  GET_MODE_SIZE (submode));
3373 
3374 	  /* Scan backwards for the set of the real and
3375 	     imaginary parts.  */
3376 	  for (sinsn = linsn; sinsn != 0;
3377 	       sinsn = prev_nonnote_insn (sinsn))
3378 	    {
3379 	      set = single_set (sinsn);
3380 	      if (set == 0)
3381 		continue;
3382 
3383 	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
3384 		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3385 	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
3386 		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3387 	    }
3388 	}
3389       else
3390 	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3391     }
3392 
3393   /* For pointer data type, suggest pointer register.  */
3394   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3395     mark_reg_pointer (parmreg,
3396 		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3397 }
3398 
3399 /* A subroutine of assign_parms.  Allocate stack space to hold the current
3400    parameter.  Get it there.  Perform all ABI specified conversions.  */
3401 
3402 static void
3403 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3404 		         struct assign_parm_data_one *data)
3405 {
3406   /* Value must be stored in the stack slot STACK_PARM during function
3407      execution.  */
3408   bool to_conversion = false;
3409 
3410   assign_parm_remove_parallels (data);
3411 
3412   if (data->promoted_mode != data->nominal_mode)
3413     {
3414       /* Conversion is required.  */
3415       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3416 
3417       emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3418 
3419       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3420       to_conversion = true;
3421 
3422       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3423 					  TYPE_UNSIGNED (TREE_TYPE (parm)));
3424 
3425       if (data->stack_parm)
3426 	{
3427 	  poly_int64 offset
3428 	    = subreg_lowpart_offset (data->nominal_mode,
3429 				     GET_MODE (data->stack_parm));
3430 	  /* ??? This may need a big-endian conversion on sparc64.  */
3431 	  data->stack_parm
3432 	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
3433 	  if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3434 	    set_mem_offset (data->stack_parm,
3435 			    MEM_OFFSET (data->stack_parm) + offset);
3436 	}
3437     }
3438 
3439   if (data->entry_parm != data->stack_parm)
3440     {
3441       rtx src, dest;
3442 
3443       if (data->stack_parm == 0)
3444 	{
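	  /* No stack slot was assigned for this parm; allocate one in
	     the frame with the alignment its type requires.  */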
3445 	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3446 					    GET_MODE (data->entry_parm),
3447 					    TYPE_ALIGN (data->passed_type));
3448 	  data->stack_parm
3449 	    = assign_stack_local (GET_MODE (data->entry_parm),
3450 				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3451 				  align);
3452 	  set_mem_attributes (data->stack_parm, parm, 1);
3453 	}
3454 
3455       dest = validize_mem (copy_rtx (data->stack_parm));
3456       src = validize_mem (copy_rtx (data->entry_parm));
3457 
3458       if (MEM_P (src))
3459 	{
3460 	  /* Use a block move to handle potentially misaligned entry_parm.  */
3461 	  if (!to_conversion)
3462 	    push_to_sequence2 (all->first_conversion_insn,
3463 			       all->last_conversion_insn);
3464 	  to_conversion = true;
3465 
3466 	  emit_block_move (dest, src,
3467 			   GEN_INT (int_size_in_bytes (data->passed_type)),
3468 			   BLOCK_OP_NORMAL);
3469 	}
3470       else
3471 	{
3472 	  if (!REG_P (src))
3473 	    src = force_reg (GET_MODE (src), src);
3474 	  emit_move_insn (dest, src);
3475 	}
3476     }
3477 
3478   if (to_conversion)
3479     {
3480       all->first_conversion_insn = get_insns ();
3481       all->last_conversion_insn = get_last_insn ();
3482       end_sequence ();
3483     }
3484 
3485   set_parm_rtl (parm, data->stack_parm);
3486 }
3487 
3488 /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
3489    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
3490 
3491 static void
3492 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3493 			      vec<tree> fnargs)
3494 {
3495   tree parm;
3496   tree orig_fnargs = all->orig_fnargs;
3497   unsigned i = 0;
3498 
3499   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3500     {
3501       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3502 	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3503 	{
3504 	  rtx tmp, real, imag;
3505 	  scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3506 
3507 	  real = DECL_RTL (fnargs[i]);
3508 	  imag = DECL_RTL (fnargs[i + 1]);
3509 	  if (inner != GET_MODE (real))
3510 	    {
3511 	      real = gen_lowpart_SUBREG (inner, real);
3512 	      imag = gen_lowpart_SUBREG (inner, imag);
3513 	    }
3514 
3515 	  if (TREE_ADDRESSABLE (parm))
3516 	    {
3517 	      rtx rmem, imem;
3518 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3519 	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3520 						DECL_MODE (parm),
3521 						TYPE_ALIGN (TREE_TYPE (parm)));
3522 
3523 	      /* split_complex_arg put the real and imag parts in
3524 		 pseudos.  Move them to memory.  */
3525 	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
3526 	      set_mem_attributes (tmp, parm, 1);
3527 	      rmem = adjust_address_nv (tmp, inner, 0);
3528 	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3529 	      push_to_sequence2 (all->first_conversion_insn,
3530 				 all->last_conversion_insn);
3531 	      emit_move_insn (rmem, real);
3532 	      emit_move_insn (imem, imag);
3533 	      all->first_conversion_insn = get_insns ();
3534 	      all->last_conversion_insn = get_last_insn ();
3535 	      end_sequence ();
3536 	    }
3537 	  else
3538 	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3539 	  set_parm_rtl (parm, tmp);
3540 
3541 	  real = DECL_INCOMING_RTL (fnargs[i]);
3542 	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3543 	  if (inner != GET_MODE (real))
3544 	    {
3545 	      real = gen_lowpart_SUBREG (inner, real);
3546 	      imag = gen_lowpart_SUBREG (inner, imag);
3547 	    }
3548 	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3549 	  set_decl_incoming_rtl (parm, tmp, false);
3550 	  i++;
3551 	}
3552     }
3553 }
3554 
3555 /* Assign RTL expressions to the function's parameters.  This may involve
3556    copying them into registers and using those registers as the DECL_RTL.  */
3557 
3558 static void
3559 assign_parms (tree fndecl)
3560 {
3561   struct assign_parm_data_all all;
3562   tree parm;
3563   vec<tree> fnargs;
3564   unsigned i;
3565 
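  /* Ask the target which rtx to use for addressing incoming arguments;
     the default hook returns virtual_incoming_args_rtx.  */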
3566   crtl->args.internal_arg_pointer
3567     = targetm.calls.internal_arg_pointer ();
3568 
3569   assign_parms_initialize_all (&all);
3570   fnargs = assign_parms_augmented_arg_list (&all);
3571 
3572   FOR_EACH_VEC_ELT (fnargs, i, parm)
3573     {
3574       struct assign_parm_data_one data;
3575 
3576       /* Extract the type of PARM; adjust it according to ABI.  */
3577       assign_parm_find_data_types (&all, parm, &data);
3578 
3579       /* Early out for errors and void parameters.  */
3580       if (data.passed_mode == VOIDmode)
3581 	{
3582 	  SET_DECL_RTL (parm, const0_rtx);
3583 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3584 	  continue;
3585 	}
3586 
3587       /* Estimate stack alignment from parameter alignment.  */
3588       if (SUPPORTS_STACK_ALIGNMENT)
3589         {
3590           unsigned int align
3591 	    = targetm.calls.function_arg_boundary (data.promoted_mode,
3592 						   data.passed_type);
3593 	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3594 				     align);
3595 	  if (TYPE_ALIGN (data.nominal_type) > align)
3596 	    align = MINIMUM_ALIGNMENT (data.nominal_type,
3597 				       TYPE_MODE (data.nominal_type),
3598 				       TYPE_ALIGN (data.nominal_type));
3599 	  if (crtl->stack_alignment_estimated < align)
3600 	    {
3601 	      gcc_assert (!crtl->stack_realign_processed);
3602 	      crtl->stack_alignment_estimated = align;
3603 	    }
3604 	}
3605 
3606       /* Find out where the parameter arrives in this function.  */
3607       assign_parm_find_entry_rtl (&all, &data);
3608 
3609       /* Find out where stack space for this parameter might be.  */
3610       if (assign_parm_is_stack_parm (&all, &data))
3611 	{
3612 	  assign_parm_find_stack_rtl (parm, &data);
3613 	  assign_parm_adjust_entry_rtl (&data);
3614 	}
3615       /* Record permanently how this parm was passed.  */
3616       if (data.passed_pointer)
3617 	{
3618 	  rtx incoming_rtl
3619 	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3620 			   data.entry_parm);
3621 	  set_decl_incoming_rtl (parm, incoming_rtl, true);
3622 	}
3623       else
3624 	set_decl_incoming_rtl (parm, data.entry_parm, false);
3625 
3626       assign_parm_adjust_stack_rtl (&data);
3627 
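      /* Materialize the parm in whichever form suits it best: a block
	 of memory, a pseudo register, or its stack slot.  */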
3628       if (assign_parm_setup_block_p (&data))
3629 	assign_parm_setup_block (&all, parm, &data);
3630       else if (data.passed_pointer || use_register_for_decl (parm))
3631 	assign_parm_setup_reg (&all, parm, &data);
3632       else
3633 	assign_parm_setup_stack (&all, parm, &data);
3634 
3635       if (cfun->stdarg && !DECL_CHAIN (parm))
3636 	assign_parms_setup_varargs (&all, &data, false);
3637 
3638       /* Update info on where next arg arrives in registers.  */
3639       targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3640 					  data.passed_type, data.named_arg);
3641     }
3642 
3643   if (targetm.calls.split_complex_arg)
3644     assign_parms_unsplit_complex (&all, fnargs);
3645 
3646   fnargs.release ();
3647 
3648   /* Output all parameter conversion instructions (possibly including calls)
3649      now that all parameters have been copied out of hard registers.  */
3650   emit_insn (all.first_conversion_insn);
3651 
3652   /* Estimate reload stack alignment from scalar return mode.  */
3653   if (SUPPORTS_STACK_ALIGNMENT)
3654     {
3655       if (DECL_RESULT (fndecl))
3656 	{
3657 	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
3658 	  machine_mode mode = TYPE_MODE (type);
3659 
3660 	  if (mode != BLKmode
3661 	      && mode != VOIDmode
3662 	      && !AGGREGATE_TYPE_P (type))
3663 	    {
3664 	      unsigned int align = GET_MODE_ALIGNMENT (mode);
3665 	      if (crtl->stack_alignment_estimated < align)
3666 		{
3667 		  gcc_assert (!crtl->stack_realign_processed);
3668 		  crtl->stack_alignment_estimated = align;
3669 		}
3670 	    }
3671 	}
3672     }
3673 
3674   /* If we are receiving a struct value address as the first argument, set up
3675      the RTL for the function result. As this might require code to convert
3676      the transmitted address to Pmode, we do this here to ensure that possible
3677      preliminary conversions of the address have been emitted already.  */
3678   if (all.function_result_decl)
3679     {
3680       tree result = DECL_RESULT (current_function_decl);
3681       rtx addr = DECL_RTL (all.function_result_decl);
3682       rtx x;
3683 
3684       if (DECL_BY_REFERENCE (result))
3685 	{
3686 	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3687 	  x = addr;
3688 	}
3689       else
3690 	{
3691 	  SET_DECL_VALUE_EXPR (result,
3692 			       build1 (INDIRECT_REF, TREE_TYPE (result),
3693 				       all.function_result_decl));
3694 	  addr = convert_memory_address (Pmode, addr);
3695 	  x = gen_rtx_MEM (DECL_MODE (result), addr);
3696 	  set_mem_attributes (x, result, 1);
3697 	}
3698 
3699       DECL_HAS_VALUE_EXPR_P (result) = 1;
3700 
3701       set_parm_rtl (result, x);
3702     }
3703 
3704   /* We have aligned all the args, so add space for the pretend args.  */
3705   crtl->args.pretend_args_size = all.pretend_args_size;
3706   all.stack_args_size.constant += all.extra_pretend_bytes;
3707   crtl->args.size = all.stack_args_size.constant;
3708 
3709   /* Adjust function incoming argument size for alignment and
3710      minimum length.  */
3711 
3712   crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3713   crtl->args.size = aligned_upper_bound (crtl->args.size,
3714 					 PARM_BOUNDARY / BITS_PER_UNIT);
3715 
3716   if (ARGS_GROW_DOWNWARD)
3717     {
3718       crtl->args.arg_offset_rtx
3719 	= (all.stack_args_size.var == 0
3720 	   ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3721 	   : expand_expr (size_diffop (all.stack_args_size.var,
3722 				       size_int (-all.stack_args_size.constant)),
3723 			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
3724     }
3725   else
3726     crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3727 
3728   /* See how many bytes, if any, of its args a function should try to pop
3729      on return.  */
3730 
3731   crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3732 							 TREE_TYPE (fndecl),
3733 							 crtl->args.size);
3734 
3735   /* For a stdarg.h function, save info about
3736      regs and stack space used by the named args.  */
3737 
3738   crtl->args.info = all.args_so_far_v;
3739 
3740   /* Set the rtx used for the function return value.  Put this in its
3741      own variable so any optimizers that need this information don't have
3742      to include tree.h.  Do this here so it gets done when an inlined
3743      function gets output.  */
3744 
3745   crtl->return_rtx
3746     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3747        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3748 
3749   /* If scalar return value was computed in a pseudo-reg, or was a named
3750      return value that got dumped to the stack, copy that to the hard
3751      return register.  */
3752   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3753     {
3754       tree decl_result = DECL_RESULT (fndecl);
3755       rtx decl_rtl = DECL_RTL (decl_result);
3756 
3757       if (REG_P (decl_rtl)
3758 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3759 	  : DECL_REGISTER (decl_result))
3760 	{
3761 	  rtx real_decl_rtl;
3762 
3763 	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3764 							fndecl, true);
3765 	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3766 	  /* The delay slot scheduler assumes that crtl->return_rtx
3767 	     holds the hard register containing the return value, not a
3768 	     temporary pseudo.  */
3769 	  crtl->return_rtx = real_decl_rtl;
3770 	}
3771     }
3772 }
3773 
3774 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3775    For all seen types, gimplify their sizes.  */
3776 
3777 static tree
3778 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3779 {
3780   tree t = *tp;
3781 
3782   *walk_subtrees = 0;
3783   if (TYPE_P (t))
3784     {
3785       if (POINTER_TYPE_P (t))
3786 	*walk_subtrees = 1;
3787       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3788 	       && !TYPE_SIZES_GIMPLIFIED (t))
3789 	{
3790 	  gimplify_type_sizes (t, (gimple_seq *) data);
3791 	  *walk_subtrees = 1;
3792 	}
3793     }
3794 
3795   return NULL;
3796 }
3797 
3798 /* Gimplify the parameter list for current_function_decl.  This involves
3799    evaluating SAVE_EXPRs of variable sized parameters and generating code
3800    to implement callee-copies reference parameters.  Returns a sequence of
3801    statements to add to the beginning of the function.  */
3802 
3803 gimple_seq
3804 gimplify_parameters (gimple_seq *cleanup)
3805 {
3806   struct assign_parm_data_all all;
3807   tree parm;
3808   gimple_seq stmts = NULL;
3809   vec<tree> fnargs;
3810   unsigned i;
3811 
3812   assign_parms_initialize_all (&all);
3813   fnargs = assign_parms_augmented_arg_list (&all);
3814 
3815   FOR_EACH_VEC_ELT (fnargs, i, parm)
3816     {
3817       struct assign_parm_data_one data;
3818 
3819       /* Extract the type of PARM; adjust it according to ABI.  */
3820       assign_parm_find_data_types (&all, parm, &data);
3821 
3822       /* Early out for errors and void parameters.  */
3823       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3824 	continue;
3825 
3826       /* Update info on where next arg arrives in registers.  */
3827       targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3828 					  data.passed_type, data.named_arg);
3829 
3830       /* ??? Once upon a time variable_size stuffed parameter list
3831 	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
3832 	 turned out to be less than manageable in the gimple world.
3833 	 Now we have to hunt them down ourselves.  */
3834       walk_tree_without_duplicates (&data.passed_type,
3835 				    gimplify_parm_type, &stmts);
3836 
3837       if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3838 	{
3839 	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3840 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3841 	}
3842 
3843       if (data.passed_pointer)
3844 	{
3845           tree type = TREE_TYPE (data.passed_type);
3846 	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3847 				       type, data.named_arg))
3848 	    {
3849 	      tree local, t;
3850 
3851 	      /* For constant-sized objects, this is trivial; for
3852 		 variable-sized objects, we have to play games.  */
3853 	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3854 		  && !(flag_stack_check == GENERIC_STACK_CHECK
3855 		       && compare_tree_int (DECL_SIZE_UNIT (parm),
3856 					    STACK_CHECK_MAX_VAR_SIZE) > 0))
3857 		{
3858 		  local = create_tmp_var (type, get_name (parm));
3859 		  DECL_IGNORED_P (local) = 0;
3860 		  /* If PARM was addressable, move that flag over
3861 		     to the local copy, as its address will be taken,
3862 		     not the PARM's.  Keep the PARM address-taken,
3863 		     as we'll query that flag during gimplification.  */
3864 		  if (TREE_ADDRESSABLE (parm))
3865 		    TREE_ADDRESSABLE (local) = 1;
3866 		  else if (TREE_CODE (type) == COMPLEX_TYPE
3867 			   || TREE_CODE (type) == VECTOR_TYPE)
3868 		    DECL_GIMPLE_REG_P (local) = 1;
3869 
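		  /* If the local copy lives in memory, clobber it at the
		     end of the function so its stack slot can be reused,
		     just as for ordinary local variables.  */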
3870 		  if (!is_gimple_reg (local)
3871 		      && flag_stack_reuse != SR_NONE)
3872 		    {
3873 		      tree clobber = build_constructor (type, NULL);
3874 		      gimple *clobber_stmt;
3875 		      TREE_THIS_VOLATILE (clobber) = 1;
3876 		      clobber_stmt = gimple_build_assign (local, clobber);
3877 		      gimple_seq_add_stmt (cleanup, clobber_stmt);
3878 		    }
3879 		}
3880 	      else
3881 		{
3882 		  tree ptr_type, addr;
3883 
3884 		  ptr_type = build_pointer_type (type);
3885 		  addr = create_tmp_reg (ptr_type, get_name (parm));
3886 		  DECL_IGNORED_P (addr) = 0;
3887 		  local = build_fold_indirect_ref (addr);
3888 
3889 		  t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
3890 					      DECL_ALIGN (parm),
3891 					      max_int_size_in_bytes (type));
3892 		  /* The call has been built for a variable-sized object.  */
3893 		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
3894 		  t = fold_convert (ptr_type, t);
3895 		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3896 		  gimplify_and_add (t, &stmts);
3897 		}
3898 
3899 	      gimplify_assign (local, parm, &stmts);
3900 
3901 	      SET_DECL_VALUE_EXPR (parm, local);
3902 	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
3903 	    }
3904 	}
3905     }
3906 
3907   fnargs.release ();
3908 
3909   return stmts;
3910 }
3911 
3912 /* Compute the size and offset from the start of the stacked arguments for a
3913    parm passed in mode PASSED_MODE and with type TYPE.
3914 
3915    INITIAL_OFFSET_PTR points to the current offset into the stacked
3916    arguments.
3917 
3918    The starting offset and size for this parm are returned in
3919    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
3920    nonzero, the offset is that of the stack slot, which is returned in
3921    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
3922    padding required from the initial offset ptr to the stack slot.
3923 
3924    IN_REGS is nonzero if the argument will be passed in registers.  It will
3925    never be set if REG_PARM_STACK_SPACE is not defined.
3926 
3927    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3928    for arguments which are passed in registers.
3929 
3930    FNDECL is the function in which the argument was defined.
3931 
3932    There are two types of rounding that are done.  The first, controlled by
3933    TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3934    argument list to be aligned to the specific boundary (in bits).  This
3935    rounding affects the initial and starting offsets, but not the argument
3936    size.
3937 
3938    The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3939    optionally rounds the size of the parm to PARM_BOUNDARY.  The
3940    initial offset is not affected by this rounding, while the size always
3941    is and the starting offset may be.  */
3942 
3943 /*  LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
3944     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3945     callers pass in the total size of args so far as
3946     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
3947 
3948 void
3949 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
3950 		     int reg_parm_stack_space, int partial,
3951 		     tree fndecl ATTRIBUTE_UNUSED,
3952 		     struct args_size *initial_offset_ptr,
3953 		     struct locate_and_pad_arg_data *locate)
3954 {
3955   tree sizetree;
3956   pad_direction where_pad;
3957   unsigned int boundary, round_boundary;
3958   int part_size_in_regs;
3959 
3960   /* If we have found a stack parm before we reach the end of the
3961      area reserved for registers, skip that area.  */
3962   if (! in_regs)
3963     {
3964       if (reg_parm_stack_space > 0)
3965 	{
3966 	  if (initial_offset_ptr->var
3967 	      || !ordered_p (initial_offset_ptr->constant,
3968 			     reg_parm_stack_space))
3969 	    {
3970 	      initial_offset_ptr->var
3971 		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3972 			      ssize_int (reg_parm_stack_space));
3973 	      initial_offset_ptr->constant = 0;
3974 	    }
3975 	  else
3976 	    initial_offset_ptr->constant
3977 	      = ordered_max (initial_offset_ptr->constant,
3978 			     reg_parm_stack_space);
3979 	}
3980     }
3981 
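  /* PARTIAL is the number of bytes of this argument passed in registers;
     it only reduces the stack footprint when no stack space is reserved
     for register parms.  */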
3982   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3983 
3984   sizetree = (type
3985 	      ? arg_size_in_bytes (type)
3986 	      : size_int (GET_MODE_SIZE (passed_mode)));
3987   where_pad = targetm.calls.function_arg_padding (passed_mode, type);
3988   boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3989   round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3990 							      type);
3991   locate->where_pad = where_pad;
3992 
3993   /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
3994   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3995     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3996 
3997   locate->boundary = boundary;
3998 
3999   if (SUPPORTS_STACK_ALIGNMENT)
4000     {
4001       /* stack_alignment_estimated can't change after stack has been
4002 	 realigned.  */
4003       if (crtl->stack_alignment_estimated < boundary)
4004         {
4005           if (!crtl->stack_realign_processed)
4006 	    crtl->stack_alignment_estimated = boundary;
4007 	  else
4008 	    {
4009 	      /* If stack is realigned and stack alignment value
4010 		 hasn't been finalized, it is OK not to increase
4011 		 stack_alignment_estimated.  The bigger alignment
4012 		 requirement is recorded in stack_alignment_needed
4013 		 below.  */
4014 	      gcc_assert (!crtl->stack_realign_finalized
4015 			  && crtl->stack_realign_needed);
4016 	    }
4017 	}
4018     }
4019 
4020   /* Remember if the outgoing parameter requires extra alignment on the
4021      calling function side.  */
4022   if (crtl->stack_alignment_needed < boundary)
4023     crtl->stack_alignment_needed = boundary;
4024   if (crtl->preferred_stack_boundary < boundary)
4025     crtl->preferred_stack_boundary = boundary;
4026 
4027   if (ARGS_GROW_DOWNWARD)
4028     {
4029       locate->slot_offset.constant = -initial_offset_ptr->constant;
4030       if (initial_offset_ptr->var)
4031 	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4032 					      initial_offset_ptr->var);
4033 
4034       {
4035 	tree s2 = sizetree;
4036 	if (where_pad != PAD_NONE
4037 	    && (!tree_fits_uhwi_p (sizetree)
4038 		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4039 	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4040 	SUB_PARM_SIZE (locate->slot_offset, s2);
4041       }
4042 
4043       locate->slot_offset.constant += part_size_in_regs;
4044 
4045       if (!in_regs || reg_parm_stack_space > 0)
4046 	pad_to_arg_alignment (&locate->slot_offset, boundary,
4047 			      &locate->alignment_pad);
4048 
4049       locate->size.constant = (-initial_offset_ptr->constant
4050 			       - locate->slot_offset.constant);
4051       if (initial_offset_ptr->var)
4052 	locate->size.var = size_binop (MINUS_EXPR,
4053 				       size_binop (MINUS_EXPR,
4054 						   ssize_int (0),
4055 						   initial_offset_ptr->var),
4056 				       locate->slot_offset.var);
4057 
4058       /* Pad_below needs the pre-rounded size to know how much to pad
4059 	 below.  */
4060       locate->offset = locate->slot_offset;
4061       if (where_pad == PAD_DOWNWARD)
4062 	pad_below (&locate->offset, passed_mode, sizetree);
4063 
4064     }
4065   else
4066     {
4067       if (!in_regs || reg_parm_stack_space > 0)
4068 	pad_to_arg_alignment (initial_offset_ptr, boundary,
4069 			      &locate->alignment_pad);
4070       locate->slot_offset = *initial_offset_ptr;
4071 
4072 #ifdef PUSH_ROUNDING
4073       if (passed_mode != BLKmode)
4074 	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4075 #endif
4076 
4077       /* Pad_below needs the pre-rounded size to know how much to pad below
4078 	 so this must be done before rounding up.  */
4079       locate->offset = locate->slot_offset;
4080       if (where_pad == PAD_DOWNWARD)
4081 	pad_below (&locate->offset, passed_mode, sizetree);
4082 
4083       if (where_pad != PAD_NONE
4084 	  && (!tree_fits_uhwi_p (sizetree)
4085 	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4086 	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4087 
4088       ADD_PARM_SIZE (locate->size, sizetree);
4089 
4090       locate->size.constant -= part_size_in_regs;
4091     }
4092 
4093   locate->offset.constant
4094     += targetm.calls.function_arg_offset (passed_mode, type);
4095 }
4096 
4097 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4098    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
4099 
4100 static void
4101 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4102 		      struct args_size *alignment_pad)
4103 {
4104   tree save_var = NULL_TREE;
4105   poly_int64 save_constant = 0;
4106   int boundary_in_bytes = boundary / BITS_PER_UNIT;
4107   poly_int64 sp_offset = STACK_POINTER_OFFSET;
4108 
4109 #ifdef SPARC_STACK_BOUNDARY_HACK
4110   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4111      the real alignment of %sp.  However, when it does this, the
4112      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
4113   if (SPARC_STACK_BOUNDARY_HACK)
4114     sp_offset = 0;
4115 #endif
4116 
4117   if (boundary > PARM_BOUNDARY)
4118     {
4119       save_var = offset_ptr->var;
4120       save_constant = offset_ptr->constant;
4121     }
4122 
4123   alignment_pad->var = NULL_TREE;
4124   alignment_pad->constant = 0;
4125 
4126   if (boundary > BITS_PER_UNIT)
4127     {
4128       int misalign;
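      /* If the offset has a variable part, or the misalignment of its
	 constant part is not known at compile time, do the rounding
	 symbolically with trees; otherwise adjust the constant term.  */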
4129       if (offset_ptr->var
4130 	  || !known_misalignment (offset_ptr->constant + sp_offset,
4131 				  boundary_in_bytes, &misalign))
4132 	{
4133 	  tree sp_offset_tree = ssize_int (sp_offset);
4134 	  tree offset = size_binop (PLUS_EXPR,
4135 				    ARGS_SIZE_TREE (*offset_ptr),
4136 				    sp_offset_tree);
4137 	  tree rounded;
4138 	  if (ARGS_GROW_DOWNWARD)
4139 	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
4140 	  else
4141 	    rounded = round_up   (offset, boundary / BITS_PER_UNIT);
4142 
4143 	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4144 	  /* ARGS_SIZE_TREE includes constant term.  */
4145 	  offset_ptr->constant = 0;
4146 	  if (boundary > PARM_BOUNDARY)
4147 	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4148 					     save_var);
4149 	}
4150       else
4151 	{
4152 	  if (ARGS_GROW_DOWNWARD)
4153 	    offset_ptr->constant -= misalign;
4154 	  else
4155 	    offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4156 
4157 	  if (boundary > PARM_BOUNDARY)
4158 	    alignment_pad->constant = offset_ptr->constant - save_constant;
4159 	}
4160     }
4161 }
4162 
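/* Add to *OFFSET_PTR the amount of downward padding needed to bring an
   argument of mode PASSED_MODE (or of size SIZETREE, for BLKmode) up to
   a multiple of PARM_BOUNDARY.  */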
4163 static void
4164 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4165 {
4166   unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4167   int misalign;
4168   if (passed_mode != BLKmode
4169       && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4170     offset_ptr->constant += -misalign & (align - 1);
4171   else
4172     {
4173       if (TREE_CODE (sizetree) != INTEGER_CST
4174 	  || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4175 	{
4176 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
4177 	  tree s2 = round_up (sizetree, align);
4178 	  /* Add it in.  */
4179 	  ADD_PARM_SIZE (*offset_ptr, s2);
4180 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
4181 	}
4182     }
4183 }
4184 
4185 
4186 /* True if register REGNO was alive at a place where `setjmp' was
4187    called and was set more than once or is an argument.  Such regs may
4188    be clobbered by `longjmp'.  */
4189 
4190 static bool
4191 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4192 {
4193   /* There appear to be cases where some local vars never reach the
4194      backend but have bogus regnos.  */
4195   if (regno >= max_reg_num ())
4196     return false;
4197 
4198   return ((REG_N_SETS (regno) > 1
4199 	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4200 			       regno))
4201 	  && REGNO_REG_SET_P (setjmp_crosses, regno));
4202 }
4203 
4204 /* Walk the tree of blocks describing the binding levels within a
4205    function and warn about variables that might be killed by setjmp or
4206    vfork.  This is done after flow analysis, but before register
4207    allocation, since register allocation will replace the pseudo-regs
4208    with hard regs.  */
4209 
4210 static void
4211 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4212 {
4213   tree decl, sub;
4214 
4215   for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4216     {
4217       if (VAR_P (decl)
4218 	  && DECL_RTL_SET_P (decl)
4219 	  && REG_P (DECL_RTL (decl))
4220 	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4221 	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4222                  " %<longjmp%> or %<vfork%>", decl);
4223     }
4224 
4225   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4226     setjmp_vars_warning (setjmp_crosses, sub);
4227 }
4228 
4229 /* Do the appropriate part of setjmp_vars_warning
4230    but for arguments instead of local variables.  */
4231 
4232 static void
4233 setjmp_args_warning (bitmap setjmp_crosses)
4234 {
4235   tree decl;
4236   for (decl = DECL_ARGUMENTS (current_function_decl);
4237        decl; decl = DECL_CHAIN (decl))
4238     if (DECL_RTL (decl) != 0
4239 	&& REG_P (DECL_RTL (decl))
4240 	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4241       warning (OPT_Wclobbered,
4242                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4243 	       decl);
4244 }
4245 
4246 /* Generate warning messages for variables live across setjmp.  */
4247 
4248 void
4249 generate_setjmp_warnings (void)
4250 {
4251   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4252 
4253   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4254       || bitmap_empty_p (setjmp_crosses))
4255     return;
4256 
4257   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4258   setjmp_args_warning (setjmp_crosses);
4259 }
4260 
4261 
4262 /* Reverse the order of elements in the fragment chain T of blocks,
4263    and return the new head of the chain (old last element).
4264    In addition to that clear BLOCK_SAME_RANGE flags when needed
4265    and adjust BLOCK_SUPERCONTEXT from the super fragment to
4266    its super fragment origin.  */
4267 
4268 static tree
4269 block_fragments_nreverse (tree t)
4270 {
4271   tree prev = 0, block, next, prev_super = 0;
4272   tree super = BLOCK_SUPERCONTEXT (t);
4273   if (BLOCK_FRAGMENT_ORIGIN (super))
4274     super = BLOCK_FRAGMENT_ORIGIN (super);
4275   for (block = t; block; block = next)
4276     {
4277       next = BLOCK_FRAGMENT_CHAIN (block);
4278       BLOCK_FRAGMENT_CHAIN (block) = prev;
4279       if ((prev && !BLOCK_SAME_RANGE (prev))
4280 	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4281 	      != prev_super))
4282 	BLOCK_SAME_RANGE (block) = 0;
4283       prev_super = BLOCK_SUPERCONTEXT (block);
4284       BLOCK_SUPERCONTEXT (block) = super;
4285       prev = block;
4286     }
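  /* Finally fix up the fragment origin itself: its BLOCK_SAME_RANGE and
     BLOCK_SUPERCONTEXT must agree with the reversed chain as well.  */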
4287   t = BLOCK_FRAGMENT_ORIGIN (t);
4288   if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4289       != prev_super)
4290     BLOCK_SAME_RANGE (t) = 0;
4291   BLOCK_SUPERCONTEXT (t) = super;
4292   return prev;
4293 }
4294 
4295 /* Reverse the order of elements in the chain T of blocks,
4296    and return the new head of the chain (old last element).
4297    Also do the same on subblocks and reverse the order of elements
4298    in BLOCK_FRAGMENT_CHAIN as well.  */
4299 
4300 static tree
4301 blocks_nreverse_all (tree t)
4302 {
4303   tree prev = 0, block, next;
4304   for (block = t; block; block = next)
4305     {
4306       next = BLOCK_CHAIN (block);
4307       BLOCK_CHAIN (block) = prev;
4308       if (BLOCK_FRAGMENT_CHAIN (block)
4309 	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4310 	{
4311 	  BLOCK_FRAGMENT_CHAIN (block)
4312 	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4313 	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4314 	    BLOCK_SAME_RANGE (block) = 0;
4315 	}
4316       BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4317       prev = block;
4318     }
4319   return prev;
4320 }
4321 
4322 
4323 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4324    and create duplicate blocks.  */
4325 /* ??? Need an option to either create block fragments or to create
4326    abstract origin duplicates of a source block.  It really depends
4327    on what optimization has been performed.  */
4328 
4329 void
4330 reorder_blocks (void)
4331 {
4332   tree block = DECL_INITIAL (current_function_decl);
4333 
4334   if (block == NULL_TREE)
4335     return;
4336 
4337   auto_vec<tree, 10> block_stack;
4338 
4339   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
4340   clear_block_marks (block);
4341 
4342   /* Prune the old trees away, so that they don't get in the way.  */
4343   BLOCK_SUBBLOCKS (block) = NULL_TREE;
4344   BLOCK_CHAIN (block) = NULL_TREE;
4345 
4346   /* Recreate the block tree from the note nesting.  */
4347   reorder_blocks_1 (get_insns (), block, &block_stack);
4348   BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4349 }
4350 
4351 /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
4352 
4353 void
4354 clear_block_marks (tree block)
4355 {
4356   while (block)
4357     {
4358       TREE_ASM_WRITTEN (block) = 0;
4359       clear_block_marks (BLOCK_SUBBLOCKS (block));
4360       block = BLOCK_CHAIN (block);
4361     }
4362 }
4363 
4364 static void
4365 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4366 		  vec<tree> *p_block_stack)
4367 {
4368   rtx_insn *insn;
4369   tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4370 
4371   for (insn = insns; insn; insn = NEXT_INSN (insn))
4372     {
4373       if (NOTE_P (insn))
4374 	{
4375 	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4376 	    {
4377 	      tree block = NOTE_BLOCK (insn);
4378 	      tree origin;
4379 
4380 	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4381 	      origin = block;
4382 
4383 	      if (prev_end)
4384 		BLOCK_SAME_RANGE (prev_end) = 0;
4385 	      prev_end = NULL_TREE;
4386 
4387 	      /* If we have seen this block before, that means it now
4388 		 spans multiple address regions.  Create a new fragment.  */
4389 	      if (TREE_ASM_WRITTEN (block))
4390 		{
4391 		  tree new_block = copy_node (block);
4392 
4393 		  BLOCK_SAME_RANGE (new_block) = 0;
4394 		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4395 		  BLOCK_FRAGMENT_CHAIN (new_block)
4396 		    = BLOCK_FRAGMENT_CHAIN (origin);
4397 		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4398 
4399 		  NOTE_BLOCK (insn) = new_block;
4400 		  block = new_block;
4401 		}
4402 
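	      /* If no real insn has appeared since the enclosing block
		 was opened, this block begins where its parent did and
		 may share its range.  */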
4403 	      if (prev_beg == current_block && prev_beg)
4404 		BLOCK_SAME_RANGE (block) = 1;
4405 
4406 	      prev_beg = origin;
4407 
4408 	      BLOCK_SUBBLOCKS (block) = 0;
4409 	      TREE_ASM_WRITTEN (block) = 1;
4410 	      /* When there's only one block for the entire function,
4411 		 current_block == block and we mustn't do this, as it
4412 		 would cause infinite recursion.  */
4413 	      if (block != current_block)
4414 		{
4415 		  tree super;
4416 		  if (block != origin)
4417 		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4418 				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4419 								      (origin))
4420 				   == current_block);
4421 		  if (p_block_stack->is_empty ())
4422 		    super = current_block;
4423 		  else
4424 		    {
4425 		      super = p_block_stack->last ();
4426 		      gcc_assert (super == current_block
4427 				  || BLOCK_FRAGMENT_ORIGIN (super)
4428 				     == current_block);
4429 		    }
4430 		  BLOCK_SUPERCONTEXT (block) = super;
4431 		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4432 		  BLOCK_SUBBLOCKS (current_block) = block;
4433 		  current_block = origin;
4434 		}
4435 	      p_block_stack->safe_push (block);
4436 	    }
4437 	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4438 	    {
4439 	      NOTE_BLOCK (insn) = p_block_stack->pop ();
4440 	      current_block = BLOCK_SUPERCONTEXT (current_block);
4441 	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
4442 		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4443 	      prev_beg = NULL_TREE;
4444 	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4445 			 ? NOTE_BLOCK (insn) : NULL_TREE;
4446 	    }
4447 	}
4448       else
4449 	{
4450 	  prev_beg = NULL_TREE;
4451 	  if (prev_end)
4452 	    BLOCK_SAME_RANGE (prev_end) = 0;
4453 	  prev_end = NULL_TREE;
4454 	}
4455     }
4456 }
4457 
4458 /* Reverse the order of elements in the chain T of blocks,
4459    and return the new head of the chain (old last element).  */
4460 
4461 tree
4462 blocks_nreverse (tree t)
4463 {
4464   tree prev = 0, block, next;
4465   for (block = t; block; block = next)
4466     {
4467       next = BLOCK_CHAIN (block);
4468       BLOCK_CHAIN (block) = prev;
4469       prev = block;
4470     }
4471   return prev;
4472 }
4473 
4474 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4475    by modifying the last node in chain 1 to point to chain 2.  */
4476 
4477 tree
4478 block_chainon (tree op1, tree op2)
4479 {
4480   tree t1;
4481 
4482   if (!op1)
4483     return op2;
4484   if (!op2)
4485     return op1;
4486 
4487   for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4488     continue;
4489   BLOCK_CHAIN (t1) = op2;
4490 
4491 #ifdef ENABLE_TREE_CHECKING
4492   {
4493     tree t2;
4494     for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4495       gcc_assert (t2 != t1);
4496   }
4497 #endif
4498 
4499   return op1;
4500 }
4501 
4502 /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
4503    non-NULL, list them all into VECTOR, in a depth-first preorder
4504    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
4505    blocks.  */
4506 
4507 static int
4508 all_blocks (tree block, tree *vector)
4509 {
4510   int n_blocks = 0;
4511 
4512   while (block)
4513     {
4514       TREE_ASM_WRITTEN (block) = 0;
4515 
4516       /* Record this block.  */
4517       if (vector)
4518 	vector[n_blocks] = block;
4519 
4520       ++n_blocks;
4521 
4522       /* Record the subblocks, and their subblocks...  */
4523       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4524 			      vector ? vector + n_blocks : 0);
4525       block = BLOCK_CHAIN (block);
4526     }
4527 
4528   return n_blocks;
4529 }
4530 
4531 /* Return a vector containing all the blocks rooted at BLOCK.  The
4532    number of elements in the vector is stored in N_BLOCKS_P.  The
4533    vector is dynamically allocated; it is the caller's responsibility
4534    to call `free' on the pointer returned.  */
4535 
4536 static tree *
4537 get_block_vector (tree block, int *n_blocks_p)
4538 {
4539   tree *block_vector;
4540 
4541   *n_blocks_p = all_blocks (block, NULL);
4542   block_vector = XNEWVEC (tree, *n_blocks_p);
4543   all_blocks (block, block_vector);
4544 
4545   return block_vector;
4546 }
4547 
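/* The next value to hand out as a BLOCK_NUMBER.  Numbering starts at 2;
   the outermost BLOCK of each function is left unnumbered (see
   number_blocks below).  */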
4548 static GTY(()) int next_block_index = 2;
4549 
4550 /* Set BLOCK_NUMBER for all the blocks in FN.  */
4551 
4552 void
4553 number_blocks (tree fn)
4554 {
4555   int i;
4556   int n_blocks;
4557   tree *block_vector;
4558 
4559   /* For XCOFF debugging output, we start numbering the blocks
4560      from 1 within each function, rather than keeping a running
4561      count.  */
4562 #if defined (XCOFF_DEBUGGING_INFO)
4563   if (write_symbols == XCOFF_DEBUG)
4564     next_block_index = 1;
4565 #endif
4566 
4567   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4568 
4569   /* The top-level BLOCK isn't numbered at all.  */
4570   for (i = 1; i < n_blocks; ++i)
4571     /* We number the blocks from two.  */
4572     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4573 
4574   free (block_vector);
4575 
4576   return;
4577 }
4578 
4579 /* If VAR is present in a subblock of BLOCK, return the subblock.  */
4580 
4581 DEBUG_FUNCTION tree
4582 debug_find_var_in_block_tree (tree var, tree block)
4583 {
4584   tree t;
4585 
4586   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4587     if (t == var)
4588       return block;
4589 
4590   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4591     {
4592       tree ret = debug_find_var_in_block_tree (var, t);
4593       if (ret)
4594 	return ret;
4595     }
4596 
4597   return NULL_TREE;
4598 }
4599 
4600 /* Keep track of whether we're in a dummy function context.  If we are,
4601    we don't want to invoke the set_current_function hook, because we'll
4602    get into trouble if the hook calls target_reinit () recursively or
4603    when the initial initialization is not yet complete.  */
4604 
4605 static bool in_dummy_function;
4606 
4607 /* Invoke the target hook when setting cfun.  Update the optimization options
4608    if the function uses different options than the default.  */
4609 
4610 static void
4611 invoke_set_current_function_hook (tree fndecl)
4612 {
4613   if (!in_dummy_function)
4614     {
4615       tree opts = ((fndecl)
4616 		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4617 		   : optimization_default_node);
4618 
4619       if (!opts)
4620 	opts = optimization_default_node;
4621 
4622       /* Change optimization options if needed.  */
4623       if (optimization_current_node != opts)
4624 	{
4625 	  optimization_current_node = opts;
4626 	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4627 	}
4628 
4629       targetm.set_current_function (fndecl);
4630       this_fn_optabs = this_target_optabs;
4631 
4632       /* Initialize global alignment variables now that options are set.  */
4633       parse_alignment_opts ();
4634 
4635       if (opts != optimization_default_node)
4636 	{
4637 	  init_tree_optimization_optabs (opts);
4638 	  if (TREE_OPTIMIZATION_OPTABS (opts))
4639 	    this_fn_optabs = (struct target_optabs *)
4640 	      TREE_OPTIMIZATION_OPTABS (opts);
4641 	}
4642     }
4643 }
4644 
4645 /* cfun should never be set directly; use this function.  */
4646 
4647 void
4648 set_cfun (struct function *new_cfun, bool force)
4649 {
4650   if (cfun != new_cfun || force)
4651     {
4652       cfun = new_cfun;
4653       invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4654       redirect_edge_var_map_empty ();
4655     }
4656 }
4657 
4658 /* Initialized with NOGC, making this poisonous to the garbage collector.  */
4659 
4660 static vec<function *> cfun_stack;
4661 
4662 /* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
4663    current_function_decl accordingly.  */
4664 
4665 void
4666 push_cfun (struct function *new_cfun)
4667 {
4668   gcc_assert ((!cfun && !current_function_decl)
4669 	      || (cfun && current_function_decl == cfun->decl));
4670   cfun_stack.safe_push (cfun);
4671   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4672   set_cfun (new_cfun);
4673 }
4674 
4675 /* Pop cfun from the stack.  Also set current_function_decl accordingly.  */
4676 
4677 void
4678 pop_cfun (void)
4679 {
4680   struct function *new_cfun = cfun_stack.pop ();
4681   /* When in_dummy_function, we do have a cfun but current_function_decl is
4682      NULL.  We also allow pushing NULL cfun and subsequently changing
4683      current_function_decl to something else and have both restored by
4684      pop_cfun.  */
4685   gcc_checking_assert (in_dummy_function
4686 		       || !cfun
4687 		       || current_function_decl == cfun->decl);
4688   set_cfun (new_cfun);
4689   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4690 }
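
/* Minimal usage sketch (hypothetical caller, not code from this file):
   work that temporarily needs another function's context is bracketed as

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... inspect or modify the function through cfun ...
     pop_cfun ();

   so that cfun and current_function_decl are restored afterwards.
   OTHER_FNDECL here is an assumed FUNCTION_DECL with a struct function.  */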
4691 
4692 /* Return the current value of funcdef_no and increment it.  */
4693 int
4694 get_next_funcdef_no (void)
4695 {
4696   return funcdef_no++;
4697 }
4698 
4699 /* Return the current value of funcdef_no.  */
4700 int
4701 get_last_funcdef_no (void)
4702 {
4703   return funcdef_no;
4704 }
4705 
4706 /* Allocate a function structure for FNDECL and set its contents
4707    to the defaults.  Set cfun to the newly-allocated object.
4708    Some of the helper functions invoked during initialization assume
4709    that cfun has already been set.  Therefore, assign the new object
4710    directly into cfun and invoke the back end hook explicitly at the
4711    very end, rather than initializing a temporary and calling set_cfun
4712    on it.
4713 
4714    ABSTRACT_P is true if this is a function that will never be seen by
4715    the middle-end.  Such functions are front-end concepts (like C++
4716    function templates) that do not correspond directly to functions
4717    placed in object files.  */
4718 
4719 void
4720 allocate_struct_function (tree fndecl, bool abstract_p)
4721 {
4722   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4723 
4724   cfun = ggc_cleared_alloc<function> ();
4725 
4726   init_eh_for_function ();
4727 
4728   if (init_machine_status)
4729     cfun->machine = (*init_machine_status) ();
4730 
4731 #ifdef OVERRIDE_ABI_FORMAT
4732   OVERRIDE_ABI_FORMAT (fndecl);
4733 #endif
4734 
4735   if (fndecl != NULL_TREE)
4736     {
4737       DECL_STRUCT_FUNCTION (fndecl) = cfun;
4738       cfun->decl = fndecl;
4739       current_function_funcdef_no = get_next_funcdef_no ();
4740     }
4741 
4742   invoke_set_current_function_hook (fndecl);
4743 
4744   if (fndecl != NULL_TREE)
4745     {
4746       tree result = DECL_RESULT (fndecl);
4747 
4748       if (!abstract_p)
4749 	{
4750 	  /* Now that we have activated any function-specific attributes
4751 	     that might affect layout, particularly vector modes, relayout
4752 	     each of the parameters and the result.  */
4753 	  relayout_decl (result);
4754 	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4755 	       parm = DECL_CHAIN (parm))
4756 	    relayout_decl (parm);
4757 
4758 	  /* Similarly relayout the function decl.  */
4759 	  targetm.target_option.relayout_function (fndecl);
4760 	}
4761 
4762       if (!abstract_p && aggregate_value_p (result, fndecl))
4763 	{
4764 #ifdef PCC_STATIC_STRUCT_RETURN
4765 	  cfun->returns_pcc_struct = 1;
4766 #endif
4767 	  cfun->returns_struct = 1;
4768 	}
4769 
4770       cfun->stdarg = stdarg_p (fntype);
4771 
4772       /* Assume all registers in stdarg functions need to be saved.  */
4773       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4774       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4775 
4776       /* ??? This could be set on a per-function basis by the front-end
4777          but is this worth the hassle?  */
4778       cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4779       cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4780 
4781       if (!profile_flag && !flag_instrument_function_entry_exit)
4782 	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4783     }
4784 
4785   /* Don't enable begin stmt markers if var-tracking at assignments is
4786      disabled.  The markers make little sense without the variable
4787      binding annotations among them.  */
4788   cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4789     && MAY_HAVE_DEBUG_MARKER_STMTS;
4790 }
4791 
4792 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4793    instead of just setting it.  */
4794 
4795 void
4796 push_struct_function (tree fndecl)
4797 {
4798   /* When in_dummy_function we might be in the middle of a pop_cfun and
4799      current_function_decl and cfun may not match.  */
4800   gcc_assert (in_dummy_function
4801 	      || (!cfun && !current_function_decl)
4802 	      || (cfun && current_function_decl == cfun->decl));
4803   cfun_stack.safe_push (cfun);
4804   current_function_decl = fndecl;
4805   allocate_struct_function (fndecl, false);
4806 }
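
/* Usage sketch (hedged; CHILD_FNDECL is an assumed, freshly built
   FUNCTION_DECL): callers that materialize a brand-new function body
   typically pair this with pop_cfun:

     push_struct_function (child_fndecl);
     ... build and lower the body with cfun set to the new function ...
     pop_cfun ();

   which allocates the struct function and later restores the previous
   context.  */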
4807 
4808 /* Reset crtl and other non-struct-function variables to defaults as
4809    appropriate for emitting rtl at the start of a function.  */
4810 
4811 static void
4812 prepare_function_start (void)
4813 {
4814   gcc_assert (!get_last_insn ());
4815   init_temp_slots ();
4816   init_emit ();
4817   init_varasm_status ();
4818   init_expr ();
4819   default_rtl_profile ();
4820 
4821   if (flag_stack_usage_info)
4822     {
4823       cfun->su = ggc_cleared_alloc<stack_usage> ();
4824       cfun->su->static_stack_size = -1;
4825     }
4826 
4827   cse_not_expected = ! optimize;
4828 
4829   /* Caller save not needed yet.  */
4830   caller_save_needed = 0;
4831 
4832   /* We haven't done register allocation yet.  */
4833   reg_renumber = 0;
4834 
4835   /* Indicate that we have not instantiated virtual registers yet.  */
4836   virtuals_instantiated = 0;
4837 
4838   /* Indicate that we want CONCATs now.  */
4839   generating_concat_p = 1;
4840 
4841   /* Indicate we have no need of a frame pointer yet.  */
4842   frame_pointer_needed = 0;
4843 }
4844 
4845 void
4846 push_dummy_function (bool with_decl)
4847 {
4848   tree fn_decl, fn_type, fn_result_decl;
4849 
4850   gcc_assert (!in_dummy_function);
4851   in_dummy_function = true;
4852 
4853   if (with_decl)
4854     {
4855       fn_type = build_function_type_list (void_type_node, NULL_TREE);
4856       fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4857 			    fn_type);
4858       fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4859 					 NULL_TREE, void_type_node);
4860       DECL_RESULT (fn_decl) = fn_result_decl;
4861     }
4862   else
4863     fn_decl = NULL_TREE;
4864 
4865   push_struct_function (fn_decl);
4866 }
4867 
4868 /* Initialize the rtl expansion mechanism so that we can do simple things
4869    like generate sequences.  This is used to provide a context during global
4870    initialization of some passes.  You must call expand_dummy_function_end
4871    to exit this context.  */
4872 
4873 void
4874 init_dummy_function_start (void)
4875 {
4876   push_dummy_function (false);
4877   prepare_function_start ();
4878 }
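
/* Sketch of the intended pairing (illustrative only): code that needs an
   RTL context during global initialization does something like

     init_dummy_function_start ();
     start_sequence ();
     ... generate and examine a few insns ...
     end_sequence ();
     expand_dummy_function_end ();

   so emit-rtl state exists even though no real function is being
   compiled.  */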
4879 
4880 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4881    and initialize static variables for generating RTL for the statements
4882    of the function.  */
4883 
4884 void
4885 init_function_start (tree subr)
4886 {
4887   /* Initialize backend, if needed.  */
4888   initialize_rtl ();
4889 
4890   prepare_function_start ();
4891   decide_function_section (subr);
4892 
4893   /* Warn if this value is an aggregate type,
4894      regardless of which calling convention we are using for it.  */
4895   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4896     warning (OPT_Waggregate_return, "function returns an aggregate");
4897 }
4898 
4899 /* Expand code to verify the stack_protect_guard.  This is invoked at
4900    the end of a function to be protected.  */
4901 
4902 void
4903 stack_protect_epilogue (void)
4904 {
4905   tree guard_decl = crtl->stack_protect_guard_decl;
4906   rtx_code_label *label = gen_label_rtx ();
4907   rtx x, y;
4908   rtx_insn *seq = NULL;
4909 
4910   x = expand_normal (crtl->stack_protect_guard);
4911 
4912   if (targetm.have_stack_protect_combined_test () && guard_decl)
4913     {
4914       gcc_assert (DECL_P (guard_decl));
4915       y = DECL_RTL (guard_decl);
4916       /* Allow the target to compute the address of Y and compare it with X
4917 	 without leaking Y into a register.  This combined address + compare
4918 	 pattern allows the target to prevent spilling of any intermediate
4919 	 results by splitting it after register allocation.  */
4920       seq = targetm.gen_stack_protect_combined_test (x, y, label);
4921     }
4922   else
4923     {
4924       if (guard_decl)
4925 	y = expand_normal (guard_decl);
4926       else
4927 	y = const0_rtx;
4928 
4929       /* Allow the target to compare Y with X without leaking either into
4930 	 a register.  */
4931       if (targetm.have_stack_protect_test ())
4932 	seq = targetm.gen_stack_protect_test (x, y, label);
4933     }
4934 
4935   if (seq)
4936     emit_insn (seq);
4937   else
4938     emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4939 
4940   /* The noreturn predictor has been moved to the tree level.  The rtl-level
4941      predictors estimate this branch about 20%, which isn't enough to get
4942      things moved out of line.  Since this is the only extant case of adding
4943      a noreturn function at the rtl level, it doesn't seem worth doing anything
4944      except adding the prediction by hand.  */
4945   rtx_insn *tmp = get_last_insn ();
4946   if (JUMP_P (tmp))
4947     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4948 
4949   expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4950   free_temp_slots ();
4951   emit_label (label);
4952 }
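
/* Conceptually (a C-level sketch, not the exact RTL emitted above), the
   check expands to

     if (frame_canary_slot != __stack_chk_guard)
       __stack_chk_fail ();

   where FRAME_CANARY_SLOT is a stand-in name for crtl->stack_protect_guard,
   and the failure call is whatever targetm.stack_protect_fail returns,
   typically a call to __stack_chk_fail.  */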
4953 
4954 /* Start the RTL for a new function, and set variables used for
4955    emitting RTL.
4956    SUBR is the FUNCTION_DECL node.  */
4959 
4960 void
4961 expand_function_start (tree subr)
4962 {
4963   /* Make sure volatile mem refs aren't considered
4964      valid operands of arithmetic insns.  */
4965   init_recog_no_volatile ();
4966 
4967   crtl->profile
4968     = (profile_flag
4969        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4970 
4971   crtl->limit_stack
4972     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4973 
4974   /* Make the label for return statements to jump to.  Do not special
4975      case machines with special return instructions -- they will be
4976      handled later during jump, ifcvt, or epilogue creation.  */
4977   return_label = gen_label_rtx ();
4978 
4979   /* Initialize rtx used to return the value.  */
4980   /* Do this before assign_parms so that we copy the struct value address
4981      before any library calls that assign parms might generate.  */
4982 
4983   /* Decide whether to return the value in memory or in a register.  */
4984   tree res = DECL_RESULT (subr);
4985   if (aggregate_value_p (res, subr))
4986     {
4987       /* Returning something that won't go in a register.  */
4988       rtx value_address = 0;
4989 
4990 #ifdef PCC_STATIC_STRUCT_RETURN
4991       if (cfun->returns_pcc_struct)
4992 	{
4993 	  int size = int_size_in_bytes (TREE_TYPE (res));
4994 	  value_address = assemble_static_space (size);
4995 	}
4996       else
4997 #endif
4998 	{
4999 	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5000 	  /* Expect to be passed the address of a place to store the value.
5001 	     If it is passed as an argument, assign_parms will take care of
5002 	     it.  */
5003 	  if (sv)
5004 	    {
5005 	      value_address = gen_reg_rtx (Pmode);
5006 	      emit_move_insn (value_address, sv);
5007 	    }
5008 	}
5009       if (value_address)
5010 	{
5011 	  rtx x = value_address;
5012 	  if (!DECL_BY_REFERENCE (res))
5013 	    {
5014 	      x = gen_rtx_MEM (DECL_MODE (res), x);
5015 	      set_mem_attributes (x, res, 1);
5016 	    }
5017 	  set_parm_rtl (res, x);
5018 	}
5019     }
5020   else if (DECL_MODE (res) == VOIDmode)
5021     /* If return mode is void, this decl rtl should not be used.  */
5022     set_parm_rtl (res, NULL_RTX);
5023   else
5024     {
5025       /* Compute the return values into a pseudo reg, which we will copy
5026 	 into the true return register after the cleanups are done.  */
5027       tree return_type = TREE_TYPE (res);
5028 
5029       /* If we may coalesce this result, make sure it has the expected mode
5030 	 in case it was promoted.  But we need not bother about BLKmode.  */
5031       machine_mode promoted_mode
5032 	= flag_tree_coalesce_vars && is_gimple_reg (res)
5033 	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5034 	  : BLKmode;
5035 
5036       if (promoted_mode != BLKmode)
5037 	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5038       else if (TYPE_MODE (return_type) != BLKmode
5039 	       && targetm.calls.return_in_msb (return_type))
5040 	/* expand_function_end will insert the appropriate padding in
5041 	   this case.  Use the return value's natural (unpadded) mode
5042 	   within the function proper.  */
5043 	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5044       else
5045 	{
5046 	  /* In order to figure out what mode to use for the pseudo, we
5047 	     figure out what the mode of the eventual return register will
5048 	     actually be, and use that.  */
5049 	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5050 
5051 	  /* Structures that are returned in registers are not
5052 	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
5053 	  if (REG_P (hard_reg))
5054 	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5055 	  else
5056 	    {
5057 	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5058 	      set_parm_rtl (res, gen_group_rtx (hard_reg));
5059 	    }
5060 	}
5061 
5062       /* Set DECL_REGISTER flag so that expand_function_end will copy the
5063 	 result to the real return register(s).  */
5064       DECL_REGISTER (res) = 1;
5065     }
5066 
5067   /* Initialize rtx for parameters and local variables.
5068      In some cases this requires emitting insns.  */
5069   assign_parms (subr);
5070 
5071   /* If function gets a static chain arg, store it.  */
5072   if (cfun->static_chain_decl)
5073     {
5074       tree parm = cfun->static_chain_decl;
5075       rtx local, chain;
5076       rtx_insn *insn;
5077       int unsignedp;
5078 
5079       local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5080       chain = targetm.calls.static_chain (current_function_decl, true);
5081 
5082       set_decl_incoming_rtl (parm, chain, false);
5083       set_parm_rtl (parm, local);
5084       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5085 
5086       if (GET_MODE (local) != GET_MODE (chain))
5087 	{
5088 	  convert_move (local, chain, unsignedp);
5089 	  insn = get_last_insn ();
5090 	}
5091       else
5092 	insn = emit_move_insn (local, chain);
5093 
5094       /* Mark the register as eliminable, similar to parameters.  */
5095       if (MEM_P (chain)
5096 	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5097 	set_dst_reg_note (insn, REG_EQUIV, chain, local);
5098 
5099       /* If we aren't optimizing, save the static chain onto the stack.  */
5100       if (!optimize)
5101 	{
5102 	  tree saved_static_chain_decl
5103 	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5104 			  DECL_NAME (parm), TREE_TYPE (parm));
5105 	  rtx saved_static_chain_rtx
5106 	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5107 	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5108 	  emit_move_insn (saved_static_chain_rtx, chain);
5109 	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5110 	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
5111 	}
5112     }
5113 
5114   /* The following was moved from init_function_start.
5115      The move was supposed to make sdb output more accurate.  */
5116   /* Indicate the beginning of the function body,
5117      as opposed to parm setup.  */
5118   emit_note (NOTE_INSN_FUNCTION_BEG);
5119 
5120   gcc_assert (NOTE_P (get_last_insn ()));
5121 
5122   parm_birth_insn = get_last_insn ();
5123 
5124   /* If the function receives a non-local goto, then store the
5125      bits we need to restore the frame pointer.  */
5126   if (cfun->nonlocal_goto_save_area)
5127     {
5128       tree t_save;
5129       rtx r_save;
5130 
5131       tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5132       gcc_assert (DECL_RTL_SET_P (var));
5133 
5134       t_save = build4 (ARRAY_REF,
5135 		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5136 		       cfun->nonlocal_goto_save_area,
5137 		       integer_zero_node, NULL_TREE, NULL_TREE);
5138       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5139       gcc_assert (GET_MODE (r_save) == Pmode);
5140 
5141       emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5142       update_nonlocal_goto_save_area ();
5143     }
5144 
5145   if (crtl->profile)
5146     {
5147 #ifdef PROFILE_HOOK
5148       PROFILE_HOOK (current_function_funcdef_no);
5149 #endif
5150     }
5151 
5152   /* If we are doing generic stack checking, the probe should go here.  */
5153   if (flag_stack_check == GENERIC_STACK_CHECK)
5154     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5155 }
5156 
5157 void
5158 pop_dummy_function (void)
5159 {
5160   pop_cfun ();
5161   in_dummy_function = false;
5162 }
5163 
5164 /* Undo the effects of init_dummy_function_start.  */
5165 void
5166 expand_dummy_function_end (void)
5167 {
5168   gcc_assert (in_dummy_function);
5169 
5170   /* End any sequences that failed to be closed due to syntax errors.  */
5171   while (in_sequence_p ())
5172     end_sequence ();
5173 
5174   /* Outside function body, can't compute type's actual size
5175      until next function's body starts.  */
5176 
5177   free_after_parsing (cfun);
5178   free_after_compilation (cfun);
5179   pop_dummy_function ();
5180 }
5181 
5182 /* Helper for diddle_return_value.  */
5183 
5184 void
5185 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5186 {
5187   if (! outgoing)
5188     return;
5189 
5190   if (REG_P (outgoing))
5191     (*doit) (outgoing, arg);
5192   else if (GET_CODE (outgoing) == PARALLEL)
5193     {
5194       int i;
5195 
5196       for (i = 0; i < XVECLEN (outgoing, 0); i++)
5197 	{
5198 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5199 
5200 	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5201 	    (*doit) (x, arg);
5202 	}
5203     }
5204 }
5205 
5206 /* Call DOIT for each hard register used as a return value from
5207    the current function.  */
5208 
5209 void
5210 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5211 {
5212   diddle_return_value_1 (doit, arg, crtl->return_rtx);
5213 }
5214 
5215 static void
5216 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5217 {
5218   emit_clobber (reg);
5219 }
5220 
5221 void
5222 clobber_return_register (void)
5223 {
5224   diddle_return_value (do_clobber_return_reg, NULL);
5225 
5226   /* In case we do use pseudo to return value, clobber it too.  */
5227   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5228     {
5229       tree decl_result = DECL_RESULT (current_function_decl);
5230       rtx decl_rtl = DECL_RTL (decl_result);
5231       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5232 	{
5233 	  do_clobber_return_reg (decl_rtl, NULL);
5234 	}
5235     }
5236 }
5237 
5238 static void
5239 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5240 {
5241   emit_use (reg);
5242 }
5243 
5244 static void
5245 use_return_register (void)
5246 {
5247   diddle_return_value (do_use_return_reg, NULL);
5248 }
5249 
5250 /* Set the location of the insn chain starting at INSN to LOC.  */
5251 
5252 static void
5253 set_insn_locations (rtx_insn *insn, int loc)
5254 {
5255   while (insn != NULL)
5256     {
5257       if (INSN_P (insn))
5258 	INSN_LOCATION (insn) = loc;
5259       insn = NEXT_INSN (insn);
5260     }
5261 }
5262 
5263 /* Generate RTL for the end of the current function.  */
5264 
5265 void
5266 expand_function_end (void)
5267 {
5268   /* If arg_pointer_save_area was referenced only from a nested
5269      function, we will not have initialized it yet.  Do that now.  */
5270   if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5271     get_arg_pointer_save_area ();
5272 
5273   /* If we are doing generic stack checking and this function makes calls,
5274      do a stack probe at the start of the function to ensure we have enough
5275      space for another stack frame.  */
5276   if (flag_stack_check == GENERIC_STACK_CHECK)
5277     {
5278       rtx_insn *insn, *seq;
5279 
5280       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5281 	if (CALL_P (insn))
5282 	  {
5283 	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5284 	    start_sequence ();
5285 	    if (STACK_CHECK_MOVING_SP)
5286 	      anti_adjust_stack_and_probe (max_frame_size, true);
5287 	    else
5288 	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5289 	    seq = get_insns ();
5290 	    end_sequence ();
5291 	    set_insn_locations (seq, prologue_location);
5292 	    emit_insn_before (seq, stack_check_probe_note);
5293 	    break;
5294 	  }
5295     }
5296 
5297   /* End any sequences that failed to be closed due to syntax errors.  */
5298   while (in_sequence_p ())
5299     end_sequence ();
5300 
5301   clear_pending_stack_adjust ();
5302   do_pending_stack_adjust ();
5303 
5304   /* Output a line number for the end of the function.
5305      SDB depended on this.  */
5306   set_curr_insn_location (input_location);
5307 
5308   /* Before the return label (if any), clobber the return
5309      registers so that they are not propagated live to the rest of
5310      the function.  This can only happen with functions that drop
5311      through; if there had been a return statement, there would
5312      have either been a return rtx, or a jump to the return label.
5313 
5314      We delay actual code generation after the current_function_value_rtx
5315      is computed.  */
5316   rtx_insn *clobber_after = get_last_insn ();
5317 
5318   /* Output the label for the actual return from the function.  */
5319   emit_label (return_label);
5320 
5321   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5322     {
5323       /* Let except.c know where it should emit the call to unregister
5324 	 the function context for sjlj exceptions.  */
5325       if (flag_exceptions)
5326 	sjlj_emit_function_exit_after (get_last_insn ());
5327     }
5328 
5329   /* If this is an implementation of throw, do what's necessary to
5330      communicate between __builtin_eh_return and the epilogue.  */
5331   expand_eh_return ();
5332 
5333   /* If stack protection is enabled for this function, check the guard.  */
5334   if (crtl->stack_protect_guard
5335       && targetm.stack_protect_runtime_enabled_p ()
5336       && naked_return_label == NULL_RTX)
5337     stack_protect_epilogue ();
5338 
5339   /* If scalar return value was computed in a pseudo-reg, or was a named
5340      return value that got dumped to the stack, copy that to the hard
5341      return register.  */
5342   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5343     {
5344       tree decl_result = DECL_RESULT (current_function_decl);
5345       rtx decl_rtl = DECL_RTL (decl_result);
5346 
5347       if (REG_P (decl_rtl)
5348 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5349 	  : DECL_REGISTER (decl_result))
5350 	{
5351 	  rtx real_decl_rtl = crtl->return_rtx;
5352 	  complex_mode cmode;
5353 
5354 	  /* This should be set in assign_parms.  */
5355 	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5356 
5357 	  /* If this is a BLKmode structure being returned in registers,
5358 	     then use the mode computed in expand_return.  Note that if
5359 	     decl_rtl is memory, then its mode may have been changed,
5360 	     but that crtl->return_rtx has not.  */
5361 	  if (GET_MODE (real_decl_rtl) == BLKmode)
5362 	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5363 
5364 	  /* If a non-BLKmode return value should be padded at the least
5365 	     significant end of the register, shift it left by the appropriate
5366 	     amount.  BLKmode results are handled using the group load/store
5367 	     machinery.  */
5368 	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5369 	      && REG_P (real_decl_rtl)
5370 	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5371 	    {
5372 	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5373 					   REGNO (real_decl_rtl)),
5374 			      decl_rtl);
5375 	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5376 	    }
5377 	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
5378 	    {
5379 	      /* If expand_function_start has created a PARALLEL for decl_rtl,
5380 		 move the result to the real return registers.  Otherwise, do
5381 		 a group load from decl_rtl for a named return.  */
5382 	      if (GET_CODE (decl_rtl) == PARALLEL)
5383 		emit_group_move (real_decl_rtl, decl_rtl);
5384 	      else
5385 		emit_group_load (real_decl_rtl, decl_rtl,
5386 				 TREE_TYPE (decl_result),
5387 				 int_size_in_bytes (TREE_TYPE (decl_result)));
5388 	    }
5389 	  /* In the case of complex integer modes smaller than a word, we'll
5390 	     need to generate some non-trivial bitfield insertions.  Do that
5391 	     on a pseudo and not the hard register.  */
5392 	  else if (GET_CODE (decl_rtl) == CONCAT
5393 		   && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5394 		   && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5395 	    {
5396 	      int old_generating_concat_p;
5397 	      rtx tmp;
5398 
5399 	      old_generating_concat_p = generating_concat_p;
5400 	      generating_concat_p = 0;
5401 	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5402 	      generating_concat_p = old_generating_concat_p;
5403 
5404 	      emit_move_insn (tmp, decl_rtl);
5405 	      emit_move_insn (real_decl_rtl, tmp);
5406 	    }
5407 	  /* If a named return value dumped decl_rtl to memory, then
5408 	     we may need to re-do the PROMOTE_MODE signed/unsigned
5409 	     extension.  */
5410 	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5411 	    {
5412 	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5413 	      promote_function_mode (TREE_TYPE (decl_result),
5414 				     GET_MODE (decl_rtl), &unsignedp,
5415 				     TREE_TYPE (current_function_decl), 1);
5416 
5417 	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
5418 	    }
5419 	  else
5420 	    emit_move_insn (real_decl_rtl, decl_rtl);
5421 	}
5422     }
5423 
5424   /* If returning a structure, arrange to return the address of the value
5425      in a place where debuggers expect to find it.
5426 
5427      If returning a structure PCC style,
5428      the caller also depends on this value.
5429      And cfun->returns_pcc_struct is not necessarily set.  */
5430   if ((cfun->returns_struct || cfun->returns_pcc_struct)
5431       && !targetm.calls.omit_struct_return_reg)
5432     {
5433       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5434       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5435       rtx outgoing;
5436 
5437       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5438 	type = TREE_TYPE (type);
5439       else
5440 	value_address = XEXP (value_address, 0);
5441 
5442       outgoing = targetm.calls.function_value (build_pointer_type (type),
5443 					       current_function_decl, true);
5444 
5445       /* Mark this as a function return value so integrate will delete the
5446 	 assignment and USE below when inlining this function.  */
5447       REG_FUNCTION_VALUE_P (outgoing) = 1;
5448 
5449       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
5450       scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5451       value_address = convert_memory_address (mode, value_address);
5452 
5453       emit_move_insn (outgoing, value_address);
5454 
5455       /* Show return register used to hold result (in this case the address
5456 	 of the result).  */
5457       crtl->return_rtx = outgoing;
5458     }
5459 
5460   /* Emit the actual code to clobber return register.  Don't emit
5461      it if clobber_after is a barrier, then the previous basic block
5462      certainly doesn't fall thru into the exit block.  */
5463   if (!BARRIER_P (clobber_after))
5464     {
5465       start_sequence ();
5466       clobber_return_register ();
5467       rtx_insn *seq = get_insns ();
5468       end_sequence ();
5469 
5470       emit_insn_after (seq, clobber_after);
5471     }
5472 
5473   /* Output the label for the naked return from the function.  */
5474   if (naked_return_label)
5475     emit_label (naked_return_label);
5476 
5477   /* @@@ This is a kludge.  We want to ensure that instructions that
5478      may trap are not moved into the epilogue by scheduling, because
5479      we don't always emit unwind information for the epilogue.  */
5480   if (cfun->can_throw_non_call_exceptions
5481       && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5482     emit_insn (gen_blockage ());
5483 
5484   /* If stack protection is enabled for this function, check the guard.  */
5485   if (crtl->stack_protect_guard
5486       && targetm.stack_protect_runtime_enabled_p ()
5487       && naked_return_label)
5488     stack_protect_epilogue ();
5489 
5490   /* If we had calls to alloca, and this machine needs
5491      an accurate stack pointer to exit the function,
5492      insert some code to save and restore the stack pointer.  */
5493   if (! EXIT_IGNORE_STACK
5494       && cfun->calls_alloca)
5495     {
5496       rtx tem = 0;
5497 
5498       start_sequence ();
5499       emit_stack_save (SAVE_FUNCTION, &tem);
5500       rtx_insn *seq = get_insns ();
5501       end_sequence ();
5502       emit_insn_before (seq, parm_birth_insn);
5503 
5504       emit_stack_restore (SAVE_FUNCTION, tem);
5505     }
5506 
5507   /* ??? This should no longer be necessary since stupid is no longer with
5508      us, but there are some parts of the compiler (e.g. reload_combine, and
5509      sh mach_dep_reorg) that still try to compute their own lifetime info
5510      instead of using the general framework.  */
5511   use_return_register ();
5512 }
5513 
5514 rtx
5515 get_arg_pointer_save_area (void)
5516 {
5517   rtx ret = arg_pointer_save_area;
5518 
5519   if (! ret)
5520     {
5521       ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5522       arg_pointer_save_area = ret;
5523     }
5524 
5525   if (! crtl->arg_pointer_save_area_init)
5526     {
5527       /* Save the arg pointer at the beginning of the function.  The
5528 	 generated stack slot may not be a valid memory address, so we
5529 	 have to check it and fix it if necessary.  */
5530       start_sequence ();
5531       emit_move_insn (validize_mem (copy_rtx (ret)),
5532                       crtl->args.internal_arg_pointer);
5533       rtx_insn *seq = get_insns ();
5534       end_sequence ();
5535 
5536       push_topmost_sequence ();
5537       emit_insn_after (seq, entry_of_function ());
5538       pop_topmost_sequence ();
5539 
5540       crtl->arg_pointer_save_area_init = true;
5541     }
5542 
5543   return ret;
5544 }
5545 
5546 
5547 /* If debugging dumps are requested, dump information about how the
5548    target handled -fstack-check=clash for the prologue.
5549 
5550    PROBES describes what if any probes were emitted.
5551 
5552    RESIDUALS indicates if the prologue had any residual allocation
5553    (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */
5554 
5555 void
5556 dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5557 {
5558   if (!dump_file)
5559     return;
5560 
5561   switch (probes)
5562     {
5563     case NO_PROBE_NO_FRAME:
5564       fprintf (dump_file,
5565 	       "Stack clash no probe no stack adjustment in prologue.\n");
5566       break;
5567     case NO_PROBE_SMALL_FRAME:
5568       fprintf (dump_file,
5569 	       "Stack clash no probe small stack adjustment in prologue.\n");
5570       break;
5571     case PROBE_INLINE:
5572       fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5573       break;
5574     case PROBE_LOOP:
5575       fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5576       break;
5577     }
5578 
5579   if (residuals)
5580     fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5581   else
5582     fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5583 
5584   if (frame_pointer_needed)
5585     fprintf (dump_file, "Stack clash frame pointer needed.\n");
5586   else
5587     fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5588 
5589   if (TREE_THIS_VOLATILE (cfun->decl))
5590     fprintf (dump_file,
5591 	     "Stack clash noreturn prologue, assuming no implicit"
5592 	     " probes in caller.\n");
5593   else
5594     fprintf (dump_file,
5595 	     "Stack clash not noreturn prologue.\n");
5596 }
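
/* Usage sketch (hedged; the real call sites live in the target back ends,
   not in this file): a target's prologue expander reports what it emitted,
   for instance

     dump_stack_clash_frame_info (PROBE_INLINE, residual_size != 0);

   where RESIDUAL_SIZE is the target's own local for any allocation left
   over after the last probe interval.  */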
5597 
5598 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5599    for the first time.  */
5600 
5601 static void
5602 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5603 {
5604   rtx_insn *tmp;
5605   hash_table<insn_cache_hasher> *hash = *hashp;
5606 
5607   if (hash == NULL)
5608     *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5609 
5610   for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5611     {
5612       rtx *slot = hash->find_slot (tmp, INSERT);
5613       gcc_assert (*slot == NULL);
5614       *slot = tmp;
5615     }
5616 }
5617 
5618 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5619    basic block, splitting or peepholes.  If INSN is a prologue or epilogue
5620    insn, then record COPY as well.  */
5621 
5622 void
5623 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5624 {
5625   hash_table<insn_cache_hasher> *hash;
5626   rtx *slot;
5627 
5628   hash = epilogue_insn_hash;
5629   if (!hash || !hash->find (insn))
5630     {
5631       hash = prologue_insn_hash;
5632       if (!hash || !hash->find (insn))
5633 	return;
5634     }
5635 
5636   slot = hash->find_slot (copy, INSERT);
5637   gcc_assert (*slot == NULL);
5638   *slot = copy;
5639 }
5640 
5641 /* Determine if any INSNs in HASH are, or are part of, INSN.  Because
5642    we can be running after reorg, SEQUENCE rtl is possible.  */
5643 
5644 static bool
5645 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5646 {
5647   if (hash == NULL)
5648     return false;
5649 
5650   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5651     {
5652       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5653       int i;
5654       for (i = seq->len () - 1; i >= 0; i--)
5655 	if (hash->find (seq->element (i)))
5656 	  return true;
5657       return false;
5658     }
5659 
5660   return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5661 }
5662 
5663 int
5664 prologue_contains (const rtx_insn *insn)
5665 {
5666   return contains (insn, prologue_insn_hash);
5667 }
5668 
5669 int
5670 epilogue_contains (const rtx_insn *insn)
5671 {
5672   return contains (insn, epilogue_insn_hash);
5673 }
5674 
5675 int
5676 prologue_epilogue_contains (const rtx_insn *insn)
5677 {
5678   if (contains (insn, prologue_insn_hash))
5679     return 1;
5680   if (contains (insn, epilogue_insn_hash))
5681     return 1;
5682   return 0;
5683 }
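
/* Illustrative query (hypothetical caller): later RTL passes that must not
   disturb prologue or epilogue instructions can simply test each insn:

     if (prologue_epilogue_contains (insn))
       continue;

   relying on the hashes filled in by record_insns above.  */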
5684 
5685 void
5686 record_prologue_seq (rtx_insn *seq)
5687 {
5688   record_insns (seq, NULL, &prologue_insn_hash);
5689 }
5690 
5691 void
5692 record_epilogue_seq (rtx_insn *seq)
5693 {
5694   record_insns (seq, NULL, &epilogue_insn_hash);
5695 }
5696 
5697 /* Set JUMP_LABEL for a return insn.  */
5698 
5699 void
5700 set_return_jump_label (rtx_insn *returnjump)
5701 {
5702   rtx pat = PATTERN (returnjump);
5703   if (GET_CODE (pat) == PARALLEL)
5704     pat = XVECEXP (pat, 0, 0);
5705   if (ANY_RETURN_P (pat))
5706     JUMP_LABEL (returnjump) = pat;
5707   else
5708     JUMP_LABEL (returnjump) = ret_rtx;
5709 }
5710 
5711 /* Return a sequence to be used as the split prologue for the current
5712    function, or NULL.  */
5713 
5714 static rtx_insn *
5715 make_split_prologue_seq (void)
5716 {
5717   if (!flag_split_stack
5718       || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5719     return NULL;
5720 
5721   start_sequence ();
5722   emit_insn (targetm.gen_split_stack_prologue ());
5723   rtx_insn *seq = get_insns ();
5724   end_sequence ();
5725 
5726   record_insns (seq, NULL, &prologue_insn_hash);
5727   set_insn_locations (seq, prologue_location);
5728 
5729   return seq;
5730 }
5731 
5732 /* Return a sequence to be used as the prologue for the current function,
5733    or NULL.  */
5734 
5735 static rtx_insn *
5736 make_prologue_seq (void)
5737 {
5738   if (!targetm.have_prologue ())
5739     return NULL;
5740 
5741   start_sequence ();
5742   rtx_insn *seq = targetm.gen_prologue ();
5743   emit_insn (seq);
5744 
5745   /* Insert an explicit USE for the frame pointer
5746      if the profiling is on and the frame pointer is required.  */
5747   if (crtl->profile && frame_pointer_needed)
5748     emit_use (hard_frame_pointer_rtx);
5749 
5750   /* Retain a map of the prologue insns.  */
5751   record_insns (seq, NULL, &prologue_insn_hash);
5752   emit_note (NOTE_INSN_PROLOGUE_END);
5753 
5754   /* Ensure that instructions are not moved into the prologue when
5755      profiling is on.  The call to the profiling routine can be
5756      emitted within the live range of a call-clobbered register.  */
5757   if (!targetm.profile_before_prologue () && crtl->profile)
5758     emit_insn (gen_blockage ());
5759 
5760   seq = get_insns ();
5761   end_sequence ();
5762   set_insn_locations (seq, prologue_location);
5763 
5764   return seq;
5765 }
5766 
5767 /* Return a sequence to be used as the epilogue for the current function,
5768    or NULL.  */
5769 
5770 static rtx_insn *
5771 make_epilogue_seq (void)
5772 {
5773   if (!targetm.have_epilogue ())
5774     return NULL;
5775 
5776   start_sequence ();
5777   emit_note (NOTE_INSN_EPILOGUE_BEG);
5778   rtx_insn *seq = targetm.gen_epilogue ();
5779   if (seq)
5780     emit_jump_insn (seq);
5781 
5782   /* Retain a map of the epilogue insns.  */
5783   record_insns (seq, NULL, &epilogue_insn_hash);
5784   set_insn_locations (seq, epilogue_location);
5785 
5786   seq = get_insns ();
5787   rtx_insn *returnjump = get_last_insn ();
5788   end_sequence ();
5789 
5790   if (JUMP_P (returnjump))
5791     set_return_jump_label (returnjump);
5792 
5793   return seq;
5794 }
5795 
5796 
5797 /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
5798    this into place with notes indicating where the prologue ends and where
5799    the epilogue begins.  Update the basic block information when possible.
5800 
5801    Notes on epilogue placement:
5802    There are several kinds of edges to the exit block:
5803    * a single fallthru edge from LAST_BB
5804    * possibly, edges from blocks containing sibcalls
5805    * possibly, fake edges from infinite loops
5806 
5807    The epilogue is always emitted on the fallthru edge from the last basic
5808    block in the function, LAST_BB, into the exit block.
5809 
5810    If LAST_BB is empty except for a label, it is the target of every
5811    other basic block in the function that ends in a return.  If a
5812    target has a return or simple_return pattern (possibly with
5813    conditional variants), these basic blocks can be changed so that a
5814    return insn is emitted into them, and their target is adjusted to
5815    the real exit block.
5816 
5817    Notes on shrink wrapping: We implement a fairly conservative
5818    version of shrink-wrapping rather than the textbook one.  We only
5819    generate a single prologue and a single epilogue.  This is
5820    sufficient to catch a number of interesting cases involving early
5821    exits.
5822 
5823    First, we identify the blocks that require the prologue to occur before
5824    them.  These are the ones that modify a call-saved register, or reference
5825    any of the stack or frame pointer registers.  To simplify things, we then
5826    mark everything reachable from these blocks as also requiring a prologue.
5827    This takes care of loops automatically, and avoids the need to examine
5828    whether MEMs reference the frame, since it is sufficient to check for
5829    occurrences of the stack or frame pointer.
5830 
5831    We then compute the set of blocks for which the need for a prologue
5832    is anticipatable (borrowing terminology from the shrink-wrapping
5833    description in Muchnick's book).  These are the blocks which either
5834    require a prologue themselves, or those that have only successors
5835    where the prologue is anticipatable.  The prologue needs to be
5836    inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5837    is not.  For the moment, we ensure that only one such edge exists.
5838 
5839    The epilogue is placed as described above, but we make a
5840    distinction between inserting return and simple_return patterns
5841    when modifying other blocks that end in a return.  Blocks that end
5842    in a sibcall omit the sibcall_epilogue if the block is not in
5843    ANTIC.  */
5844 
5845 void
5846 thread_prologue_and_epilogue_insns (void)
5847 {
5848   df_analyze ();
5849 
5850   /* Can't deal with multiple successors of the entry block at the
5851      moment.  Function should always have at least one entry
5852      point.  */
5853   gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5854 
5855   edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5856   edge orig_entry_edge = entry_edge;
5857 
5858   rtx_insn *split_prologue_seq = make_split_prologue_seq ();
5859   rtx_insn *prologue_seq = make_prologue_seq ();
5860   rtx_insn *epilogue_seq = make_epilogue_seq ();
5861 
5862   /* Try to perform a kind of shrink-wrapping, making sure the
5863      prologue/epilogue is emitted only around those parts of the
5864      function that require it.  */
5865   try_shrink_wrapping (&entry_edge, prologue_seq);
5866 
5867   /* If the target can handle splitting the prologue/epilogue into separate
5868      components, try to shrink-wrap these components separately.  */
5869   try_shrink_wrapping_separate (entry_edge->dest);
5870 
5871   /* If that did anything for any component we now need to generate the
5872      "main" prologue again.  Because some targets require some of these
5873      to be called in a specific order (i386 requires the split prologue
5874      to be first, for example), we create all three sequences again here.
5875      If this does not work for some target, that target should not enable
5876      separate shrink-wrapping.  */
5877   if (crtl->shrink_wrapped_separate)
5878     {
5879       split_prologue_seq = make_split_prologue_seq ();
5880       prologue_seq = make_prologue_seq ();
5881       epilogue_seq = make_epilogue_seq ();
5882     }
5883 
5884   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5885 
5886   /* A small fib -- epilogue is not yet completed, but we wish to re-use
5887      this marker for the splits of EH_RETURN patterns, and nothing else
5888      uses the flag in the meantime.  */
5889   epilogue_completed = 1;
5890 
5891   /* Find non-fallthru edges that end with EH_RETURN instructions.  On
5892      some targets, these get split to a special version of the epilogue
5893      code.  In order to be able to properly annotate these with unwind
5894      info, try to split them now.  If we get a valid split, drop an
5895      EPILOGUE_BEG note and mark the insns as epilogue insns.  */
5896   edge e;
5897   edge_iterator ei;
5898   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5899     {
5900       rtx_insn *prev, *last, *trial;
5901 
5902       if (e->flags & EDGE_FALLTHRU)
5903 	continue;
5904       last = BB_END (e->src);
5905       if (!eh_returnjump_p (last))
5906 	continue;
5907 
5908       prev = PREV_INSN (last);
5909       trial = try_split (PATTERN (last), last, 1);
5910       if (trial == last)
5911 	continue;
5912 
5913       record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5914       emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5915     }
5916 
5917   edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5918 
5919   if (exit_fallthru_edge)
5920     {
5921       if (epilogue_seq)
5922 	{
5923 	  insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
5924 	  commit_edge_insertions ();
5925 
5926 	  /* The epilogue insns we inserted may cause the exit edge to no longer
5927 	     be fallthru.  */
5928 	  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5929 	    {
5930 	      if (((e->flags & EDGE_FALLTHRU) != 0)
5931 		  && returnjump_p (BB_END (e->src)))
5932 		e->flags &= ~EDGE_FALLTHRU;
5933 	    }
5934 	}
5935       else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
5936 	{
5937 	  /* We have a fall-through edge to the exit block, the source is not
5938 	     at the end of the function, and there will be an assembler epilogue
5939 	     at the end of the function.
5940 	     We can't use force_nonfallthru here, because that would try to
5941 	     use return.  Inserting a jump 'by hand' is extremely messy, so
5942 	     we take advantage of cfg_layout_finalize using
5943 	     fixup_fallthru_exit_predecessor.  */
5944 	  cfg_layout_initialize (0);
5945 	  basic_block cur_bb;
5946 	  FOR_EACH_BB_FN (cur_bb, cfun)
5947 	    if (cur_bb->index >= NUM_FIXED_BLOCKS
5948 		&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5949 	      cur_bb->aux = cur_bb->next_bb;
5950 	  cfg_layout_finalize ();
5951 	}
5952     }
5953 
5954   /* Insert the prologue.  */
5955 
5956   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5957 
5958   if (split_prologue_seq || prologue_seq)
5959     {
5960       rtx_insn *split_prologue_insn = split_prologue_seq;
5961       if (split_prologue_seq)
5962 	{
5963 	  while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
5964 	    split_prologue_insn = NEXT_INSN (split_prologue_insn);
5965 	  insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
5966 	}
5967 
5968       rtx_insn *prologue_insn = prologue_seq;
5969       if (prologue_seq)
5970 	{
5971 	  while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
5972 	    prologue_insn = NEXT_INSN (prologue_insn);
5973 	  insert_insn_on_edge (prologue_seq, entry_edge);
5974 	}
5975 
5976       commit_edge_insertions ();
5977 
5978       /* Look for basic blocks within the prologue insns.  */
5979       if (split_prologue_insn
5980 	  && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
5981 	split_prologue_insn = NULL;
5982       if (prologue_insn
5983 	  && BLOCK_FOR_INSN (prologue_insn) == NULL)
5984 	prologue_insn = NULL;
5985       if (split_prologue_insn || prologue_insn)
5986 	{
5987 	  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
5988 	  bitmap_clear (blocks);
5989 	  if (split_prologue_insn)
5990 	    bitmap_set_bit (blocks,
5991 			    BLOCK_FOR_INSN (split_prologue_insn)->index);
5992 	  if (prologue_insn)
5993 	    bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
5994 	  find_many_sub_basic_blocks (blocks);
5995 	}
5996     }
5997 
5998   default_rtl_profile ();
5999 
6000   /* Emit sibling epilogues before any sibling call sites.  */
6001   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6002        (e = ei_safe_edge (ei));
6003        ei_next (&ei))
6004     {
6005       /* Skip those already handled, the ones that run without prologue.  */
6006       if (e->flags & EDGE_IGNORE)
6007 	{
6008 	  e->flags &= ~EDGE_IGNORE;
6009 	  continue;
6010 	}
6011 
6012       rtx_insn *insn = BB_END (e->src);
6013 
6014       if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6015 	continue;
6016 
6017       if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6018 	{
6019 	  start_sequence ();
6020 	  emit_note (NOTE_INSN_EPILOGUE_BEG);
6021 	  emit_insn (ep_seq);
6022 	  rtx_insn *seq = get_insns ();
6023 	  end_sequence ();
6024 
6025 	  /* Retain a map of the epilogue insns.  Used in life analysis to
6026 	     avoid getting rid of sibcall epilogue insns.  Do this before we
6027 	     actually emit the sequence.  */
6028 	  record_insns (seq, NULL, &epilogue_insn_hash);
6029 	  set_insn_locations (seq, epilogue_location);
6030 
6031 	  emit_insn_before (seq, insn);
6032 	}
6033     }
6034 
6035   if (epilogue_seq)
6036     {
6037       rtx_insn *insn, *next;
6038 
6039       /* Similarly, move any line notes that appear after the epilogue.
6040          There is no need, however, to be quite so careful about the
6041 	 existence of such a note.  Also possibly move
6042 	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6043 	 info generation.  */
6044       for (insn = epilogue_seq; insn; insn = next)
6045 	{
6046 	  next = NEXT_INSN (insn);
6047 	  if (NOTE_P (insn)
6048 	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6049 	    reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6050 	}
6051     }
6052 
6053   /* Threading the prologue and epilogue changes the artificial refs
6054      in the entry and exit blocks.  */
6055   epilogue_completed = 1;
6056   df_update_entry_exit_and_calls ();
6057 }
6058 
6059 /* Reposition the prologue-end and epilogue-begin notes after
6060    instruction scheduling.  */
6061 
6062 void
6063 reposition_prologue_and_epilogue_notes (void)
6064 {
6065   if (!targetm.have_prologue ()
6066       && !targetm.have_epilogue ()
6067       && !targetm.have_sibcall_epilogue ())
6068     return;
6069 
6070   /* Since the hash table is created on demand, the fact that it is
6071      non-null is a signal that it is non-empty.  */
6072   if (prologue_insn_hash != NULL)
6073     {
6074       size_t len = prologue_insn_hash->elements ();
6075       rtx_insn *insn, *last = NULL, *note = NULL;
6076 
6077       /* Scan from the beginning until we reach the last prologue insn.  */
6078       /* ??? While we do have the CFG intact, there are two problems:
6079 	 (1) The prologue can contain loops (typically probing the stack),
6080 	     which means that the end of the prologue isn't in the first bb.
6081 	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
6082       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6083 	{
6084 	  if (NOTE_P (insn))
6085 	    {
6086 	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6087 		note = insn;
6088 	    }
6089 	  else if (contains (insn, prologue_insn_hash))
6090 	    {
6091 	      last = insn;
6092 	      if (--len == 0)
6093 		break;
6094 	    }
6095 	}
6096 
6097       if (last)
6098 	{
6099 	  if (note == NULL)
6100 	    {
6101 	      /* Scan forward looking for the PROLOGUE_END note.  It should
6102 		 be right at the beginning of the block, possibly with other
6103 		 insn notes that got moved there.  */
6104 	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6105 		{
6106 		  if (NOTE_P (note)
6107 		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6108 		    break;
6109 		}
6110 	    }
6111 
6112 	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
6113 	  if (LABEL_P (last))
6114 	    last = NEXT_INSN (last);
6115 	  reorder_insns (note, note, last);
6116 	}
6117     }
6118 
6119   if (epilogue_insn_hash != NULL)
6120     {
6121       edge_iterator ei;
6122       edge e;
6123 
6124       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6125 	{
6126 	  rtx_insn *insn, *first = NULL, *note = NULL;
6127 	  basic_block bb = e->src;
6128 
6129 	  /* Scan from the beginning until we reach the first epilogue insn. */
6130 	  FOR_BB_INSNS (bb, insn)
6131 	    {
6132 	      if (NOTE_P (insn))
6133 		{
6134 		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6135 		    {
6136 		      note = insn;
6137 		      if (first != NULL)
6138 			break;
6139 		    }
6140 		}
6141 	      else if (first == NULL && contains (insn, epilogue_insn_hash))
6142 		{
6143 		  first = insn;
6144 		  if (note != NULL)
6145 		    break;
6146 		}
6147 	    }
6148 
6149 	  if (note)
6150 	    {
6151 	      /* If the function has a single basic block, and no real
6152 		 epilogue insns (e.g. sibcall with no cleanup), the
6153 		 epilogue note can get scheduled before the prologue
6154 		 note.  If we have frame related prologue insns, having
6155 		 them scanned during the epilogue will result in a crash.
6156 		 In this case re-order the epilogue note to just before
6157 		 the last insn in the block.  */
6158 	      if (first == NULL)
6159 		first = BB_END (bb);
6160 
6161 	      if (PREV_INSN (first) != note)
6162 		reorder_insns (note, note, PREV_INSN (first));
6163 	    }
6164 	}
6165     }
6166 }
6167 
6168 /* Returns the name of function declared by FNDECL.  */
6169 const char *
6170 fndecl_name (tree fndecl)
6171 {
6172   if (fndecl == NULL)
6173     return "(nofn)";
6174   return lang_hooks.decl_printable_name (fndecl, 1);
6175 }
6176 
6177 /* Returns the name of function FN.  */
6178 const char *
6179 function_name (struct function *fn)
6180 {
6181   tree fndecl = (fn == NULL) ? NULL : fn->decl;
6182   return fndecl_name (fndecl);
6183 }
6184 
6185 /* Returns the name of the current function.  */
6186 const char *
6187 current_function_name (void)
6188 {
6189   return function_name (cfun);
6190 }
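
/* Usage sketch (illustrative): these names are convenient in dump output,
   e.g.

     if (dump_file)
       fprintf (dump_file, ";; processing %s\n", current_function_name ());

   which prints the language-level printable name of cfun's decl.  */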
6191 
6192 
6193 static unsigned int
6194 rest_of_handle_check_leaf_regs (void)
6195 {
6196 #ifdef LEAF_REGISTERS
6197   crtl->uses_only_leaf_regs
6198     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6199 #endif
6200   return 0;
6201 }
6202 
6203 /* Insert a TYPE into the used types hash table of CFUN.  */
6204 
6205 static void
6206 used_types_insert_helper (tree type, struct function *func)
6207 {
6208   if (type != NULL && func != NULL)
6209     {
6210       if (func->used_types_hash == NULL)
6211 	func->used_types_hash = hash_set<tree>::create_ggc (37);
6212 
6213       func->used_types_hash->add (type);
6214     }
6215 }
6216 
6217 /* Given a type, insert it into the used types hash table of cfun.  */
6218 void
6219 used_types_insert (tree t)
6220 {
6221   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6222     if (TYPE_NAME (t))
6223       break;
6224     else
6225       t = TREE_TYPE (t);
6226   if (TREE_CODE (t) == ERROR_MARK)
6227     return;
6228   if (TYPE_NAME (t) == NULL_TREE
6229       || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6230     t = TYPE_MAIN_VARIANT (t);
6231   if (debug_info_level > DINFO_LEVEL_NONE)
6232     {
6233       if (cfun)
6234 	used_types_insert_helper (t, cfun);
6235       else
6236 	{
6237 	  /* So this might be a type referenced by a global variable.
6238 	     Record that type so that we can later decide to emit its
6239 	     debug information.  */
6240 	  vec_safe_push (types_used_by_cur_var_decl, t);
6241 	}
6242     }
6243 }
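
/* Illustrative sketch only (hypothetical helper, not part of GCC): a
   front end or pass that wants debug info emitted for the type of a
   declaration would record it as follows; used_types_insert itself
   takes care of stripping pointer and array wrappers down to the first
   named type and of falling back to the main variant.  */

static void
example_record_decl_type (tree decl)
{
  if (decl != NULL_TREE && TREE_TYPE (decl) != NULL_TREE)
    used_types_insert (TREE_TYPE (decl));
}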

/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
			 types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}

/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash
	  = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc<types_used_by_vars_entry> ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
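
/* Illustrative sketch only (hypothetical helper, not part of GCC):
   querying the table filled by types_used_by_var_decl_insert, using
   the same hasher and a stack-allocated key entry.  */

static bool
example_type_used_by_var_decl_p (tree type, tree var_decl)
{
  if (type == NULL || var_decl == NULL || types_used_by_vars_hash == NULL)
    return false;

  struct types_used_by_vars_entry e;
  e.var_decl = var_decl;
  e.type = type;
  return types_used_by_vars_hash->find_slot (&e, NO_INSERT) != NULL;
}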

namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}

static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  /* prepare_shrink_wrap is sensitive to the block structure of the control
     flow graph, so clean it up first.  */
  if (optimize)
    cleanup_cfg (0);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Some non-cold blocks may now be only reachable from cold blocks.
     Fix that up.  */
  fixup_partitions ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     see PR57320.  */
  cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}

namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
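
/* Like the other pass factories in this file, the one above is not
   called directly from here; the pass is instantiated from passes.def,
   roughly as in this (abridged, illustrative) excerpt:

     NEXT_PASS (pass_thread_prologue_and_epilogue);

   See passes.def for its authoritative position in the RTL pipeline.  */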


/* If CONSTRAINT is a matching constraint, then return its number.
   Otherwise, return -1.  */

static int
matching_constraint_num (const char *constraint)
{
  if (*constraint == '%')
    constraint++;

  if (IN_RANGE (*constraint, '0', '9'))
    return strtoul (constraint, NULL, 10);

  return -1;
}
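
/* A few illustrative values for the helper above:

     matching_constraint_num ("0")    => 0
     matching_constraint_num ("%1")   => 1   (commutative marker skipped)
     matching_constraint_num ("mr")   => -1  (not a matching constraint)
     matching_constraint_num ("=&r")  => -1  (an output constraint)  */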

/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads,
   or, alternatively, of both operands not coming into reload as
   matching (in which case the pseudo could go to memory just fine, as
   the alternative allows it, and no reload would be necessary).  We fix
   the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */

static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      int match, j;

      match = matching_constraint_num (constraint);
      if (match < 0)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || !(REG_P (input) || SUBREG_P (input)
	       || MEM_P (input) || CONSTANT_P (input))
	  || !general_operand (input, GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change it once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, copy_rtx (input));
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
      bool early_clobber_p = strchr (constraint, '&') != NULL;

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we replaced only the occurrence of the input operand (to
	 make the matching), we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.

	 However, if one or more of the 'input' uses have a non-matching
	 constraint and the matched output operand is an early clobber
	 operand, then do not replace the input operand, since by definition
	 it conflicts with the output operand and cannot share the same
	 register.  See PR89313 for details.  */

      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  {
	    if (!early_clobber_p
		|| match == matching_constraint_num
			      (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
	      RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
						   input, output);
	  }

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
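
/* An illustrative example of the early-clobber exception above (see
   PR89313): given

     asm ("..." : "=&r" (out) : "0" (in), "r" (in));

   the matching "0" input is still rewritten to use OUT's pseudo, but
   the plain "r" use of IN is left untouched, because the early-clobbered
   output must not be forced to share a register with an unrelated
   input.  */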

/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (VAR_P (d));
  vec_safe_push (fun->local_decls, d);
}
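
/* Illustrative sketch only (hypothetical helper, not part of GCC):
   creating an artificial local variable and registering it with FUN so
   that later passes and debug output see it.  */

static tree
example_create_artificial_local (struct function *fun, tree type,
				 const char *name)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier (name), type);
  DECL_ARTIFICIAL (var) = 1;
  DECL_CONTEXT (var) = fun->decl;
  add_local_decl (fun, var);
  return var;
}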

namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints

unsigned
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;
  rtx pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}
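
/* For orientation (simplified, illustrative RTL): an asm with a single
   output expands to a plain SET whose source is an ASM_OPERANDS,

     (set (reg:SI 90) (asm_operands ...))

   while an asm with several outputs (or with clobbers) becomes a
   PARALLEL of such SETs, which is why both pattern shapes are accepted
   in the loop above.  */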

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"