1 /* Expands front end tree to back end RTL for GCC.
2    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file handles the generation of rtl code from tree structure
21    at the level of the function as a whole.
22    It creates the rtl expressions for parameters and auto variables
23    and has full responsibility for allocating stack slots.
24 
25    `expand_function_start' is called at the beginning of a function,
26    before the function body is parsed, and `expand_function_end' is
27    called after parsing the body.
28 
29    Call `assign_stack_local' to allocate a stack slot for a local variable.
30    This is usually done during the RTL generation for the function body,
31    but it can also be done in the reload pass when a pseudo-register does
32    not get a hard register.  */
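
/* Illustrative only (not part of the original source): a rough sketch of the
   sequence a front end or the expander follows when using these entry
   points.  The exact driver code differs per front end; this is a hedged
   outline, not canonical usage.

       tree fndecl = ...;                        // FUNCTION_DECL being compiled
       allocate_struct_function (fndecl, false); // set up cfun
       expand_function_start (fndecl);           // parameters, return slot, etc.
       ...                                       // expand the body to RTL
       expand_function_end ();                   // emit the return sequence

   and, while expanding the body,

       rtx slot = assign_stack_local (SImode, 4, 0);  // a 4-byte local, mode alignment
*/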
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "memmodel.h"
45 #include "tm_p.h"
46 #include "stringpool.h"
47 #include "expmed.h"
48 #include "optabs.h"
49 #include "regs.h"
50 #include "emit-rtl.h"
51 #include "recog.h"
52 #include "rtl-error.h"
53 #include "alias.h"
54 #include "fold-const.h"
55 #include "stor-layout.h"
56 #include "varasm.h"
57 #include "except.h"
58 #include "dojump.h"
59 #include "explow.h"
60 #include "calls.h"
61 #include "expr.h"
62 #include "optabs-tree.h"
63 #include "output.h"
64 #include "langhooks.h"
65 #include "common/common-target.h"
66 #include "gimplify.h"
67 #include "tree-pass.h"
68 #include "cfgrtl.h"
69 #include "cfganal.h"
70 #include "cfgbuild.h"
71 #include "cfgcleanup.h"
72 #include "cfgexpand.h"
73 #include "shrink-wrap.h"
74 #include "toplev.h"
75 #include "rtl-iter.h"
76 #include "tree-chkp.h"
77 #include "rtl-chkp.h"
78 #include "tree-dfa.h"
79 #include "tree-ssa.h"
80 #include "stringpool.h"
81 #include "attribs.h"
82 #include "gimple.h"
83 #include "options.h"
84 
85 /* So we can assign to cfun in this file.  */
86 #undef cfun
87 
88 #ifndef STACK_ALIGNMENT_NEEDED
89 #define STACK_ALIGNMENT_NEEDED 1
90 #endif
91 
92 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
93 
94 /* Round a value down to the largest multiple of the required alignment
95    that does not exceed it.  Avoid using division in case the value is
96    negative.  Assume the alignment is a power of two.  */
97 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
98 
99 /* Similar, but round to the next highest integer that meets the
100    alignment.  */
101 #define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
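
/* Worked examples of the two rounding macros, assuming an 8-byte alignment:
   FLOOR_ROUND (37, 8) == 32 and CEIL_ROUND (37, 8) == 40, while a value that
   is already aligned is unchanged by both: FLOOR_ROUND (32, 8) ==
   CEIL_ROUND (32, 8) == 32.  */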
102 
103 /* Nonzero once virtual register instantiation has been done.
104    assign_stack_local uses frame_pointer_rtx when this is nonzero.
105    calls.c:emit_library_call_value_1 uses it to set up
106    post-instantiation libcalls.  */
107 int virtuals_instantiated;
108 
109 /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
110 static GTY(()) int funcdef_no;
111 
112 /* These variables hold pointers to functions to create and destroy
113    target specific, per-function data structures.  */
114 struct machine_function * (*init_machine_status) (void);
115 
116 /* The currently compiled function.  */
117 struct function *cfun = 0;
118 
119 /* These hashes record the prologue and epilogue insns.  */
120 
121 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
122 {
123   static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
124   static bool equal (rtx a, rtx b) { return a == b; }
125 };
126 
127 static GTY((cache))
128   hash_table<insn_cache_hasher> *prologue_insn_hash;
129 static GTY((cache))
130   hash_table<insn_cache_hasher> *epilogue_insn_hash;
131 
132 
133 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
134 vec<tree, va_gc> *types_used_by_cur_var_decl;
135 
136 /* Forward declarations.  */
137 
138 static struct temp_slot *find_temp_slot_from_address (rtx);
139 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
140 static void pad_below (struct args_size *, machine_mode, tree);
141 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
142 static int all_blocks (tree, tree *);
143 static tree *get_block_vector (tree, int *);
144 extern tree debug_find_var_in_block_tree (tree, tree);
145 /* We always define `record_insns' even if it's not used so that we
146    can always export `prologue_epilogue_contains'.  */
147 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
148      ATTRIBUTE_UNUSED;
149 static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
150 static void prepare_function_start (void);
151 static void do_clobber_return_reg (rtx, void *);
152 static void do_use_return_reg (rtx, void *);
153 
154 
155 /* Stack of nested functions; this keeps track of the cfun stack.  */
157 
158 static vec<function *> function_context_stack;
159 
160 /* Save the current context for compilation of a nested function.
161    This is called from language-specific code.  */
162 
163 void
164 push_function_context (void)
165 {
166   if (cfun == 0)
167     allocate_struct_function (NULL, false);
168 
169   function_context_stack.safe_push (cfun);
170   set_cfun (NULL);
171 }
172 
173 /* Restore the last saved context, at the end of a nested function.
174    This function is called from language-specific code.  */
175 
176 void
177 pop_function_context (void)
178 {
179   struct function *p = function_context_stack.pop ();
180   set_cfun (p);
181   current_function_decl = p->decl;
182 
183   /* Reset variables that have known state during rtx generation.  */
184   virtuals_instantiated = 0;
185   generating_concat_p = 1;
186 }
187 
188 /* Clear out all parts of the state in F that can safely be discarded
189    after the function has been parsed, but not compiled, to let
190    garbage collection reclaim the memory.  */
191 
192 void
193 free_after_parsing (struct function *f)
194 {
195   f->language = 0;
196 }
197 
198 /* Clear out all parts of the state in F that can safely be discarded
199    after the function has been compiled, to let garbage collection
200    reclaim the memory.  */
201 
202 void
203 free_after_compilation (struct function *f)
204 {
205   prologue_insn_hash = NULL;
206   epilogue_insn_hash = NULL;
207 
208   free (crtl->emit.regno_pointer_align);
209 
210   memset (crtl, 0, sizeof (struct rtl_data));
211   f->eh = NULL;
212   f->machine = NULL;
213   f->cfg = NULL;
214   f->curr_properties &= ~PROP_cfg;
215 
216   regno_reg_rtx = NULL;
217 }
218 
219 /* Return size needed for stack frame based on slots so far allocated.
220    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
221    the caller may have to do that.  */
222 
223 poly_int64
224 get_frame_size (void)
225 {
226   if (FRAME_GROWS_DOWNWARD)
227     return -frame_offset;
228   else
229     return frame_offset;
230 }
231 
232 /* Issue an error message and return TRUE if frame OFFSET overflows in
233    the signed target pointer arithmetics for function FUNC.  Otherwise
234    return FALSE.  */
235 
236 bool
237 frame_offset_overflow (poly_int64 offset, tree func)
238 {
239   poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
240   unsigned HOST_WIDE_INT limit
241     = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
242        /* Leave room for the fixed part of the frame.  */
243        - 64 * UNITS_PER_WORD);
244 
245   if (!coeffs_in_range_p (size, 0U, limit))
246     {
247       error_at (DECL_SOURCE_LOCATION (func),
248 		"total size of local objects too large");
249       return true;
250     }
251 
252   return false;
253 }
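
/* Purely illustrative arithmetic: on a target with a 32-bit Pmode and
   4-byte UNITS_PER_WORD, the limit above works out to
   (1U << 31) - 64 * 4 == 2147483392 bytes of local objects before the
   "total size of local objects too large" diagnostic triggers.  */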
254 
255 /* Return the minimum spill slot alignment for a register of mode MODE.  */
256 
257 unsigned int
258 spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
259 {
260   return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
261 }
262 
263 /* Return stack slot alignment in bits for TYPE and MODE.  */
264 
265 static unsigned int
266 get_stack_local_alignment (tree type, machine_mode mode)
267 {
268   unsigned int alignment;
269 
270   if (mode == BLKmode)
271     alignment = BIGGEST_ALIGNMENT;
272   else
273     alignment = GET_MODE_ALIGNMENT (mode);
274 
275   /* Allow the front end to (possibly) increase the alignment of this
276      stack slot.  */
277   if (! type)
278     type = lang_hooks.types.type_for_mode (mode, 0);
279 
280   return STACK_SLOT_ALIGNMENT (type, mode, alignment);
281 }
282 
283 /* Determine whether it is possible to fit a stack slot of size SIZE and
284    alignment ALIGNMENT into an area in the stack frame that starts at
285    frame offset START and has a length of LENGTH.  If so, store the frame
286    offset to be used for the stack slot in *POFFSET and return true;
287    return false otherwise.  This function will extend the frame size when
288    given a start/length pair that lies at the end of the frame.  */
289 
290 static bool
291 try_fit_stack_local (poly_int64 start, poly_int64 length,
292 		     poly_int64 size, unsigned int alignment,
293 		     poly_int64_pod *poffset)
294 {
295   poly_int64 this_frame_offset;
296   int frame_off, frame_alignment, frame_phase;
297 
298   /* Calculate how many bytes the start of local variables is off from
299      stack alignment.  */
300   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
301   frame_off = targetm.starting_frame_offset () % frame_alignment;
302   frame_phase = frame_off ? frame_alignment - frame_off : 0;
303 
304   /* Round the frame offset to the specified alignment.  */
305 
306   if (FRAME_GROWS_DOWNWARD)
307     this_frame_offset
308       = (aligned_lower_bound (start + length - size - frame_phase, alignment)
309 	 + frame_phase);
310   else
311     this_frame_offset
312       = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
313 
314   /* See if it fits.  If this space is at the edge of the frame,
315      consider extending the frame to make it fit.  Our caller relies on
316      this when allocating a new slot.  */
317   if (maybe_lt (this_frame_offset, start))
318     {
319       if (known_eq (frame_offset, start))
320 	frame_offset = this_frame_offset;
321       else
322 	return false;
323     }
324   else if (maybe_gt (this_frame_offset + size, start + length))
325     {
326       if (known_eq (frame_offset, start + length))
327 	frame_offset = this_frame_offset + size;
328       else
329 	return false;
330     }
331 
332   *poffset = this_frame_offset;
333   return true;
334 }
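
/* A worked example (illustrative only, assuming FRAME_GROWS_DOWNWARD, an
   ALIGNMENT of 8 and a zero frame_phase): for a free area with START == -40
   and LENGTH == 40, a request of SIZE == 12 gives

     this_frame_offset = aligned_lower_bound (-40 + 40 - 12, 8) == -16,

   which lies within [-40, 0), so *POFFSET becomes -16 and the new slot
   occupies bytes [-16, -4) of the frame.  */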
335 
336 /* Create a new frame_space structure describing free space in the stack
337    frame beginning at START and ending at END, and chain it into the
338    function's frame_space_list.  */
339 
340 static void
341 add_frame_space (poly_int64 start, poly_int64 end)
342 {
343   struct frame_space *space = ggc_alloc<frame_space> ();
344   space->next = crtl->frame_space_list;
345   crtl->frame_space_list = space;
346   space->start = start;
347   space->length = end - start;
348 }
349 
350 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
351    with machine mode MODE.
352 
353    ALIGN controls the amount of alignment for the address of the slot:
354    0 means according to MODE,
355    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
356    -2 means use BITS_PER_UNIT,
357    positive specifies alignment boundary in bits.
358 
359    KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
360    alignment and ASLK_RECORD_PAD bit set if we should remember
361    extra space we allocated for alignment purposes.  When we are
362    called from assign_stack_temp_for_type, it is not set so we don't
363    track the same stack slot in two independent lists.
364 
365    We do not round to stack_boundary here.  */
366 
367 rtx
368 assign_stack_local_1 (machine_mode mode, poly_int64 size,
369 		      int align, int kind)
370 {
371   rtx x, addr;
372   poly_int64 bigend_correction = 0;
373   poly_int64 slot_offset = 0, old_frame_offset;
374   unsigned int alignment, alignment_in_bits;
375 
376   if (align == 0)
377     {
378       alignment = get_stack_local_alignment (NULL, mode);
379       alignment /= BITS_PER_UNIT;
380     }
381   else if (align == -1)
382     {
383       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
384       size = aligned_upper_bound (size, alignment);
385     }
386   else if (align == -2)
387     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
388   else
389     alignment = align / BITS_PER_UNIT;
390 
391   alignment_in_bits = alignment * BITS_PER_UNIT;
392 
393   /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
394   if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
395     {
396       alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
397       alignment = alignment_in_bits / BITS_PER_UNIT;
398     }
399 
400   if (SUPPORTS_STACK_ALIGNMENT)
401     {
402       if (crtl->stack_alignment_estimated < alignment_in_bits)
403 	{
404           if (!crtl->stack_realign_processed)
405 	    crtl->stack_alignment_estimated = alignment_in_bits;
406           else
407 	    {
408 	      /* If stack is realigned and stack alignment value
409 		 hasn't been finalized, it is OK not to increase
410 		 stack_alignment_estimated.  The bigger alignment
411 		 requirement is recorded in stack_alignment_needed
412 		 below.  */
413 	      gcc_assert (!crtl->stack_realign_finalized);
414 	      if (!crtl->stack_realign_needed)
415 		{
416 		  /* It is OK to reduce the alignment as long as the
417 		     requested size is 0 or the estimated stack
418 		     alignment >= mode alignment.  */
419 		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
420 			      || known_eq (size, 0)
421 			      || (crtl->stack_alignment_estimated
422 				  >= GET_MODE_ALIGNMENT (mode)));
423 		  alignment_in_bits = crtl->stack_alignment_estimated;
424 		  alignment = alignment_in_bits / BITS_PER_UNIT;
425 		}
426 	    }
427 	}
428     }
429 
430   if (crtl->stack_alignment_needed < alignment_in_bits)
431     crtl->stack_alignment_needed = alignment_in_bits;
432   if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
433     crtl->max_used_stack_slot_alignment = alignment_in_bits;
434 
435   if (mode != BLKmode || maybe_ne (size, 0))
436     {
437       if (kind & ASLK_RECORD_PAD)
438 	{
439 	  struct frame_space **psp;
440 
441 	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
442 	    {
443 	      struct frame_space *space = *psp;
444 	      if (!try_fit_stack_local (space->start, space->length, size,
445 					alignment, &slot_offset))
446 		continue;
447 	      *psp = space->next;
448 	      if (known_gt (slot_offset, space->start))
449 		add_frame_space (space->start, slot_offset);
450 	      if (known_lt (slot_offset + size, space->start + space->length))
451 		add_frame_space (slot_offset + size,
452 				 space->start + space->length);
453 	      goto found_space;
454 	    }
455 	}
456     }
457   else if (!STACK_ALIGNMENT_NEEDED)
458     {
459       slot_offset = frame_offset;
460       goto found_space;
461     }
462 
463   old_frame_offset = frame_offset;
464 
465   if (FRAME_GROWS_DOWNWARD)
466     {
467       frame_offset -= size;
468       try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
469 
470       if (kind & ASLK_RECORD_PAD)
471 	{
472 	  if (known_gt (slot_offset, frame_offset))
473 	    add_frame_space (frame_offset, slot_offset);
474 	  if (known_lt (slot_offset + size, old_frame_offset))
475 	    add_frame_space (slot_offset + size, old_frame_offset);
476 	}
477     }
478   else
479     {
480       frame_offset += size;
481       try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
482 
483       if (kind & ASLK_RECORD_PAD)
484 	{
485 	  if (known_gt (slot_offset, old_frame_offset))
486 	    add_frame_space (old_frame_offset, slot_offset);
487 	  if (known_lt (slot_offset + size, frame_offset))
488 	    add_frame_space (slot_offset + size, frame_offset);
489 	}
490     }
491 
492  found_space:
493   /* On a big-endian machine, if we are allocating more space than we will use,
494      use the least significant bytes of those that are allocated.  */
495   if (mode != BLKmode)
496     {
497       /* The slot size can sometimes be smaller than the mode size;
498 	 e.g. the rs6000 port allocates slots with a vector mode
499 	 that have the size of only one element.  However, the slot
500 	 size must always be ordered wrt the mode size, in the
501 	 same way as for a subreg.  */
502       gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
503       if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
504 	bigend_correction = size - GET_MODE_SIZE (mode);
505     }
506 
507   /* If we have already instantiated virtual registers, return the actual
508      address relative to the frame pointer.  */
509   if (virtuals_instantiated)
510     addr = plus_constant (Pmode, frame_pointer_rtx,
511 			  trunc_int_for_mode
512 			  (slot_offset + bigend_correction
513 			   + targetm.starting_frame_offset (), Pmode));
514   else
515     addr = plus_constant (Pmode, virtual_stack_vars_rtx,
516 			  trunc_int_for_mode
517 			  (slot_offset + bigend_correction,
518 			   Pmode));
519 
520   x = gen_rtx_MEM (mode, addr);
521   set_mem_align (x, alignment_in_bits);
522   MEM_NOTRAP_P (x) = 1;
523 
524   vec_safe_push (stack_slot_list, x);
525 
526   if (frame_offset_overflow (frame_offset, current_function_decl))
527     frame_offset = 0;
528 
529   return x;
530 }
531 
532 /* Wrap up assign_stack_local_1 with last parameter as false.  */
533 
534 rtx
535 assign_stack_local (machine_mode mode, poly_int64 size, int align)
536 {
537   return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
538 }
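
/* Illustrative calls (not from the original source) showing the ALIGN
   conventions documented above assign_stack_local_1:

     rtx a = assign_stack_local (DImode, 8, 0);     // alignment taken from DImode
     rtx b = assign_stack_local (BLKmode, 24, -1);  // BIGGEST_ALIGNMENT, size rounded up
     rtx c = assign_stack_local (BLKmode, 3, -2);   // byte alignment only
     rtx d = assign_stack_local (SImode, 4, 64);    // explicit 64-bit (8-byte) boundary

   Each call returns a MEM whose address is based on virtual_stack_vars_rtx
   until the virtual registers have been instantiated.  */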
539 
540 /* In order to evaluate some expressions, such as function calls returning
541    structures in memory, we need to temporarily allocate stack locations.
542    We record each allocated temporary in the following structure.
543 
544    Associated with each temporary slot is a nesting level.  When we pop up
545    one level, all temporaries associated with the previous level are freed.
546    Normally, all temporaries are freed after the execution of the statement
547    in which they were created.  However, if we are inside a ({...}) grouping,
548    the result may be in a temporary and hence must be preserved.  If the
549    result could be in a temporary, we preserve it if we can determine which
550    one it is in.  If we cannot determine which temporary may contain the
551    result, all temporaries are preserved.  A temporary is preserved by
552    pretending it was allocated at the previous nesting level.  */
553 
554 struct GTY(()) temp_slot {
555   /* Points to next temporary slot.  */
556   struct temp_slot *next;
557   /* Points to previous temporary slot.  */
558   struct temp_slot *prev;
559   /* The rtx used to reference the slot.  */
560   rtx slot;
561   /* The size, in units, of the slot.  */
562   poly_int64 size;
563   /* The type of the object in the slot, or zero if it doesn't correspond
564      to a type.  We use this to determine whether a slot can be reused.
565      It can be reused if objects of the type of the new slot will always
566      conflict with objects of the type of the old slot.  */
567   tree type;
568   /* The alignment (in bits) of the slot.  */
569   unsigned int align;
570   /* Nonzero if this temporary is currently in use.  */
571   char in_use;
572   /* Nesting level at which this slot is being used.  */
573   int level;
574   /* The offset of the slot from the frame_pointer, including extra space
575      for alignment.  This info is for combine_temp_slots.  */
576   poly_int64 base_offset;
577   /* The size of the slot, including extra space for alignment.  This
578      info is for combine_temp_slots.  */
579   poly_int64 full_size;
580 };
581 
582 /* Entry for the below hash table.  */
583 struct GTY((for_user)) temp_slot_address_entry {
584   hashval_t hash;
585   rtx address;
586   struct temp_slot *temp_slot;
587 };
588 
589 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
590 {
591   static hashval_t hash (temp_slot_address_entry *);
592   static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
593 };
594 
595 /* A table of addresses that represent a stack slot.  The table is a mapping
596    from address RTXen to a temp slot.  */
597 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
598 static size_t n_temp_slots_in_use;
599 
600 /* Removes temporary slot TEMP from LIST.  */
601 
602 static void
603 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
604 {
605   if (temp->next)
606     temp->next->prev = temp->prev;
607   if (temp->prev)
608     temp->prev->next = temp->next;
609   else
610     *list = temp->next;
611 
612   temp->prev = temp->next = NULL;
613 }
614 
615 /* Inserts temporary slot TEMP to LIST.  */
616 
617 static void
618 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
619 {
620   temp->next = *list;
621   if (*list)
622     (*list)->prev = temp;
623   temp->prev = NULL;
624   *list = temp;
625 }
626 
627 /* Returns the list of used temp slots at LEVEL.  */
628 
629 static struct temp_slot **
630 temp_slots_at_level (int level)
631 {
632   if (level >= (int) vec_safe_length (used_temp_slots))
633     vec_safe_grow_cleared (used_temp_slots, level + 1);
634 
635   return &(*used_temp_slots)[level];
636 }
637 
638 /* Returns the maximal temporary slot level.  */
639 
640 static int
641 max_slot_level (void)
642 {
643   if (!used_temp_slots)
644     return -1;
645 
646   return used_temp_slots->length () - 1;
647 }
648 
649 /* Moves temporary slot TEMP to LEVEL.  */
650 
651 static void
652 move_slot_to_level (struct temp_slot *temp, int level)
653 {
654   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
655   insert_slot_to_list (temp, temp_slots_at_level (level));
656   temp->level = level;
657 }
658 
659 /* Make temporary slot TEMP available.  */
660 
661 static void
662 make_slot_available (struct temp_slot *temp)
663 {
664   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
665   insert_slot_to_list (temp, &avail_temp_slots);
666   temp->in_use = 0;
667   temp->level = -1;
668   n_temp_slots_in_use--;
669 }
670 
671 /* Compute the hash value for an address -> temp slot mapping.
672    The value is cached on the mapping entry.  */
673 static hashval_t
674 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
675 {
676   int do_not_record = 0;
677   return hash_rtx (t->address, GET_MODE (t->address),
678 		   &do_not_record, NULL, false);
679 }
680 
681 /* Return the hash value for an address -> temp slot mapping.  */
682 hashval_t
683 temp_address_hasher::hash (temp_slot_address_entry *t)
684 {
685   return t->hash;
686 }
687 
688 /* Compare two address -> temp slot mapping entries.  */
689 bool
690 temp_address_hasher::equal (temp_slot_address_entry *t1,
691 			    temp_slot_address_entry *t2)
692 {
693   return exp_equiv_p (t1->address, t2->address, 0, true);
694 }
695 
696 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
697 static void
698 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
699 {
700   struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
701   t->address = address;
702   t->temp_slot = temp_slot;
703   t->hash = temp_slot_address_compute_hash (t);
704   *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
705 }
706 
707 /* Remove an address -> temp slot mapping entry if the temp slot is
708    not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
709 int
710 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
711 {
712   const struct temp_slot_address_entry *t = *slot;
713   if (! t->temp_slot->in_use)
714     temp_slot_address_table->clear_slot (slot);
715   return 1;
716 }
717 
718 /* Remove all mappings of addresses to unused temp slots.  */
719 static void
720 remove_unused_temp_slot_addresses (void)
721 {
722   /* Use quicker clearing if there aren't any active temp slots.  */
723   if (n_temp_slots_in_use)
724     temp_slot_address_table->traverse
725       <void *, remove_unused_temp_slot_addresses_1> (NULL);
726   else
727     temp_slot_address_table->empty ();
728 }
729 
730 /* Find the temp slot corresponding to the object at address X.  */
731 
732 static struct temp_slot *
733 find_temp_slot_from_address (rtx x)
734 {
735   struct temp_slot *p;
736   struct temp_slot_address_entry tmp, *t;
737 
738   /* First try the easy way:
739      See if X exists in the address -> temp slot mapping.  */
740   tmp.address = x;
741   tmp.temp_slot = NULL;
742   tmp.hash = temp_slot_address_compute_hash (&tmp);
743   t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
744   if (t)
745     return t->temp_slot;
746 
747   /* If we have a sum involving a register, see if it points to a temp
748      slot.  */
749   if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
750       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
751     return p;
752   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
753 	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
754     return p;
755 
756   /* Last resort: Address is a virtual stack var address.  */
757   poly_int64 offset;
758   if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
759     {
760       int i;
761       for (i = max_slot_level (); i >= 0; i--)
762 	for (p = *temp_slots_at_level (i); p; p = p->next)
763 	  if (known_in_range_p (offset, p->base_offset, p->full_size))
764 	    return p;
765     }
766 
767   return NULL;
768 }
769 
770 /* Allocate a temporary stack slot and record it for possible later
771    reuse.
772 
773    MODE is the machine mode to be given to the returned rtx.
774 
775    SIZE is the size in units of the space required.  We do no rounding here
776    since assign_stack_local will do any required rounding.
777 
778    TYPE is the type that will be used for the stack slot.  */
779 
780 rtx
781 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
782 {
783   unsigned int align;
784   struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
785   rtx slot;
786 
787   gcc_assert (known_size_p (size));
788 
789   align = get_stack_local_alignment (type, mode);
790 
791   /* Try to find an available, already-allocated temporary of the proper
792      mode which meets the size and alignment requirements.  Choose the
793      smallest one with the closest alignment.
794 
795      If assign_stack_temp is called outside of the tree->rtl expansion,
796      we cannot reuse the stack slots (that may still refer to
797      VIRTUAL_STACK_VARS_REGNUM).  */
798   if (!virtuals_instantiated)
799     {
800       for (p = avail_temp_slots; p; p = p->next)
801 	{
802 	  if (p->align >= align
803 	      && known_ge (p->size, size)
804 	      && GET_MODE (p->slot) == mode
805 	      && objects_must_conflict_p (p->type, type)
806 	      && (best_p == 0
807 		  || (known_eq (best_p->size, p->size)
808 		      ? best_p->align > p->align
809 		      : known_ge (best_p->size, p->size))))
810 	    {
811 	      if (p->align == align && known_eq (p->size, size))
812 		{
813 		  selected = p;
814 		  cut_slot_from_list (selected, &avail_temp_slots);
815 		  best_p = 0;
816 		  break;
817 		}
818 	      best_p = p;
819 	    }
820 	}
821     }
822 
823   /* Make our best, if any, the one to use.  */
824   if (best_p)
825     {
826       selected = best_p;
827       cut_slot_from_list (selected, &avail_temp_slots);
828 
829       /* If there are enough aligned bytes left over, make them into a new
830 	 temp_slot so that the extra bytes don't get wasted.  Do this only
831 	 for BLKmode slots, so that we can be sure of the alignment.  */
832       if (GET_MODE (best_p->slot) == BLKmode)
833 	{
834 	  int alignment = best_p->align / BITS_PER_UNIT;
835 	  poly_int64 rounded_size = aligned_upper_bound (size, alignment);
836 
837 	  if (known_ge (best_p->size - rounded_size, alignment))
838 	    {
839 	      p = ggc_alloc<temp_slot> ();
840 	      p->in_use = 0;
841 	      p->size = best_p->size - rounded_size;
842 	      p->base_offset = best_p->base_offset + rounded_size;
843 	      p->full_size = best_p->full_size - rounded_size;
844 	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
845 	      p->align = best_p->align;
846 	      p->type = best_p->type;
847 	      insert_slot_to_list (p, &avail_temp_slots);
848 
849 	      vec_safe_push (stack_slot_list, p->slot);
850 
851 	      best_p->size = rounded_size;
852 	      best_p->full_size = rounded_size;
853 	    }
854 	}
855     }
856 
857   /* If we still didn't find one, make a new temporary.  */
858   if (selected == 0)
859     {
860       poly_int64 frame_offset_old = frame_offset;
861 
862       p = ggc_alloc<temp_slot> ();
863 
864       /* We are passing an explicit alignment request to assign_stack_local.
865 	 One side effect of that is assign_stack_local will not round SIZE
866 	 to ensure the frame offset remains suitably aligned.
867 
868 	 So for requests which depended on the rounding of SIZE, we go ahead
869 	 and round it now.  We also make sure ALIGNMENT is at least
870 	 BIGGEST_ALIGNMENT.  */
871       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
872       p->slot = assign_stack_local_1 (mode,
873 				      (mode == BLKmode
874 				       ? aligned_upper_bound (size,
875 							      (int) align
876 							      / BITS_PER_UNIT)
877 				       : size),
878 				      align, 0);
879 
880       p->align = align;
881 
882       /* The following slot size computation is necessary because we don't
883 	 know the actual size of the temporary slot until assign_stack_local
884 	 has performed all the frame alignment and size rounding for the
885 	 requested temporary.  Note that extra space added for alignment
886 	 can be either above or below this stack slot depending on which
887 	 way the frame grows.  We include the extra space if and only if it
888 	 is above this slot.  */
889       if (FRAME_GROWS_DOWNWARD)
890 	p->size = frame_offset_old - frame_offset;
891       else
892 	p->size = size;
893 
894       /* Now define the fields used by combine_temp_slots.  */
895       if (FRAME_GROWS_DOWNWARD)
896 	{
897 	  p->base_offset = frame_offset;
898 	  p->full_size = frame_offset_old - frame_offset;
899 	}
900       else
901 	{
902 	  p->base_offset = frame_offset_old;
903 	  p->full_size = frame_offset - frame_offset_old;
904 	}
905 
906       selected = p;
907     }
908 
909   p = selected;
910   p->in_use = 1;
911   p->type = type;
912   p->level = temp_slot_level;
913   n_temp_slots_in_use++;
914 
915   pp = temp_slots_at_level (p->level);
916   insert_slot_to_list (p, pp);
917   insert_temp_slot_address (XEXP (p->slot, 0), p);
918 
919   /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
920   slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
921   vec_safe_push (stack_slot_list, slot);
922 
923   /* If we know the alias set for the memory that will be used, use
924      it.  If there's no TYPE, then we don't know anything about the
925      alias set for the memory.  */
926   set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
927   set_mem_align (slot, align);
928 
929   /* If a type is specified, set the relevant flags.  */
930   if (type != 0)
931     MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
932   MEM_NOTRAP_P (slot) = 1;
933 
934   return slot;
935 }
936 
937 /* Allocate a temporary stack slot and record it for possible later
938    reuse.  The first two arguments are the same as in the preceding function.  */
939 
940 rtx
941 assign_stack_temp (machine_mode mode, poly_int64 size)
942 {
943   return assign_stack_temp_for_type (mode, size, NULL_TREE);
944 }
945 
946 /* Assign a temporary.
947    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
948    and so it should be used in error messages.  In either case, we
949    allocate a temporary of the given type.
950    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
951    it is 0 if a register is OK.
952    DONT_PROMOTE is 1 if we should not promote values in register
953    to wider modes.  */
954 
955 rtx
956 assign_temp (tree type_or_decl, int memory_required,
957 	     int dont_promote ATTRIBUTE_UNUSED)
958 {
959   tree type, decl;
960   machine_mode mode;
961 #ifdef PROMOTE_MODE
962   int unsignedp;
963 #endif
964 
965   if (DECL_P (type_or_decl))
966     decl = type_or_decl, type = TREE_TYPE (decl);
967   else
968     decl = NULL, type = type_or_decl;
969 
970   mode = TYPE_MODE (type);
971 #ifdef PROMOTE_MODE
972   unsignedp = TYPE_UNSIGNED (type);
973 #endif
974 
975   /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
976      end.  See also create_tmp_var for the gimplification-time check.  */
977   gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
978 
979   if (mode == BLKmode || memory_required)
980     {
981       HOST_WIDE_INT size = int_size_in_bytes (type);
982       rtx tmp;
983 
984       /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
985 	 problems with allocating the stack space.  */
986       if (size == 0)
987 	size = 1;
988 
989       /* Unfortunately, we don't yet know how to allocate variable-sized
990 	 temporaries.  However, sometimes we can find a fixed upper limit on
991 	 the size, so try that instead.  */
992       else if (size == -1)
993 	size = max_int_size_in_bytes (type);
994 
995       /* The size of the temporary may be too large to fit into an integer.  */
996       /* ??? Not sure this should happen except for user silliness, so limit
997 	 this to things that aren't compiler-generated temporaries.  The
998 	 rest of the time we'll die in assign_stack_temp_for_type.  */
999       if (decl && size == -1
1000 	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1001 	{
1002 	  error ("size of variable %q+D is too large", decl);
1003 	  size = 1;
1004 	}
1005 
1006       tmp = assign_stack_temp_for_type (mode, size, type);
1007       return tmp;
1008     }
1009 
1010 #ifdef PROMOTE_MODE
1011   if (! dont_promote)
1012     mode = promote_mode (type, mode, &unsignedp);
1013 #endif
1014 
1015   return gen_reg_rtx (mode);
1016 }
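
/* For illustration only (not part of the original source): a caller that
   needs addressable memory, e.g. for an aggregate value returned in memory,
   might write

     rtx mem = assign_temp (type, 1, 1);   // memory required, no promotion

   for some complete aggregate TYPE, whereas
   assign_temp (integer_type_node, 0, 0) may simply return a pseudo register
   in the (possibly promoted) mode of the type.  */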
1017 
1018 /* Combine temporary stack slots which are adjacent on the stack.
1019 
1020    This allows for better use of already allocated stack space.  This is only
1021    done for BLKmode slots because we can be sure that we won't have alignment
1022    problems in this case.  */
1023 
1024 static void
1025 combine_temp_slots (void)
1026 {
1027   struct temp_slot *p, *q, *next, *next_q;
1028   int num_slots;
1029 
1030   /* We can't combine slots, because the information about which slot
1031      is in which alias set will be lost.  */
1032   if (flag_strict_aliasing)
1033     return;
1034 
1035   /* If there are a lot of temp slots, don't do anything unless
1036      high levels of optimization are enabled.  */
1037   if (! flag_expensive_optimizations)
1038     for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1039       if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1040 	return;
1041 
1042   for (p = avail_temp_slots; p; p = next)
1043     {
1044       int delete_p = 0;
1045 
1046       next = p->next;
1047 
1048       if (GET_MODE (p->slot) != BLKmode)
1049 	continue;
1050 
1051       for (q = p->next; q; q = next_q)
1052 	{
1053        	  int delete_q = 0;
1054 
1055 	  next_q = q->next;
1056 
1057 	  if (GET_MODE (q->slot) != BLKmode)
1058 	    continue;
1059 
1060 	  if (known_eq (p->base_offset + p->full_size, q->base_offset))
1061 	    {
1062 	      /* Q comes after P; combine Q into P.  */
1063 	      p->size += q->size;
1064 	      p->full_size += q->full_size;
1065 	      delete_q = 1;
1066 	    }
1067 	  else if (known_eq (q->base_offset + q->full_size, p->base_offset))
1068 	    {
1069 	      /* P comes after Q; combine P into Q.  */
1070 	      q->size += p->size;
1071 	      q->full_size += p->full_size;
1072 	      delete_p = 1;
1073 	      break;
1074 	    }
1075 	  if (delete_q)
1076 	    cut_slot_from_list (q, &avail_temp_slots);
1077 	}
1078 
1079       /* Either delete P or advance past it.  */
1080       if (delete_p)
1081 	cut_slot_from_list (p, &avail_temp_slots);
1082     }
1083 }
1084 
1085 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1086    slot that previously was known by OLD_RTX.  */
1087 
1088 void
1089 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1090 {
1091   struct temp_slot *p;
1092 
1093   if (rtx_equal_p (old_rtx, new_rtx))
1094     return;
1095 
1096   p = find_temp_slot_from_address (old_rtx);
1097 
1098   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1099      NEW_RTX is a register, see if one operand of the PLUS is a
1100      temporary location.  If so, NEW_RTX points into it.  Otherwise,
1101      if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1102      in common between them.  If so, try a recursive call on those
1103      values.  */
1104   if (p == 0)
1105     {
1106       if (GET_CODE (old_rtx) != PLUS)
1107 	return;
1108 
1109       if (REG_P (new_rtx))
1110 	{
1111 	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1112 	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1113 	  return;
1114 	}
1115       else if (GET_CODE (new_rtx) != PLUS)
1116 	return;
1117 
1118       if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1119 	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1120       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1121 	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1122       else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1123 	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1124       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1125 	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1126 
1127       return;
1128     }
1129 
1130   /* Otherwise add an alias for the temp's address.  */
1131   insert_temp_slot_address (new_rtx, p);
1132 }
1133 
1134 /* If X could be a reference to a temporary slot, mark that slot as
1135    belonging to the level one higher than the current level.  If X
1136    matched one of our slots, just mark that one.  Otherwise, we can't
1137    easily predict which it is, so upgrade all of them.
1138 
1139    This is called when an ({...}) construct occurs and a statement
1140    returns a value in memory.  */
1141 
1142 void
1143 preserve_temp_slots (rtx x)
1144 {
1145   struct temp_slot *p = 0, *next;
1146 
1147   if (x == 0)
1148     return;
1149 
1150   /* If X is a register that is being used as a pointer, see if we have
1151      a temporary slot we know it points to.  */
1152   if (REG_P (x) && REG_POINTER (x))
1153     p = find_temp_slot_from_address (x);
1154 
1155   /* If X is not in memory or is at a constant address, it cannot be in
1156      a temporary slot.  */
1157   if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1158     return;
1159 
1160   /* First see if we can find a match.  */
1161   if (p == 0)
1162     p = find_temp_slot_from_address (XEXP (x, 0));
1163 
1164   if (p != 0)
1165     {
1166       if (p->level == temp_slot_level)
1167 	move_slot_to_level (p, temp_slot_level - 1);
1168       return;
1169     }
1170 
1171   /* Otherwise, preserve all non-kept slots at this level.  */
1172   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1173     {
1174       next = p->next;
1175       move_slot_to_level (p, temp_slot_level - 1);
1176     }
1177 }
1178 
1179 /* Free all temporaries used so far.  This is normally called at the
1180    end of generating code for a statement.  */
1181 
1182 void
1183 free_temp_slots (void)
1184 {
1185   struct temp_slot *p, *next;
1186   bool some_available = false;
1187 
1188   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1189     {
1190       next = p->next;
1191       make_slot_available (p);
1192       some_available = true;
1193     }
1194 
1195   if (some_available)
1196     {
1197       remove_unused_temp_slot_addresses ();
1198       combine_temp_slots ();
1199     }
1200 }
1201 
1202 /* Push deeper into the nesting level for stack temporaries.  */
1203 
1204 void
1205 push_temp_slots (void)
1206 {
1207   temp_slot_level++;
1208 }
1209 
1210 /* Pop a temporary nesting level.  All slots in use in the current level
1211    are freed.  */
1212 
1213 void
1214 pop_temp_slots (void)
1215 {
1216   free_temp_slots ();
1217   temp_slot_level--;
1218 }
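
/* Illustrative usage pattern (a sketch, not code from this file): expansion
   of a single statement typically brackets its temporaries like

     push_temp_slots ();
     rtx tmp = assign_stack_temp (Pmode, GET_MODE_SIZE (Pmode));
     ...                                  // use TMP while expanding the statement
     preserve_temp_slots (result_rtx);    // keep only the slot holding the result
     pop_temp_slots ();                   // frees the rest of this level's slots

   where RESULT_RTX stands for whatever RTL carries the statement's value.  */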
1219 
1220 /* Initialize temporary slots.  */
1221 
1222 void
1223 init_temp_slots (void)
1224 {
1225   /* We have not allocated any temporaries yet.  */
1226   avail_temp_slots = 0;
1227   vec_alloc (used_temp_slots, 0);
1228   temp_slot_level = 0;
1229   n_temp_slots_in_use = 0;
1230 
1231   /* Set up the table to map addresses to temp slots.  */
1232   if (! temp_slot_address_table)
1233     temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1234   else
1235     temp_slot_address_table->empty ();
1236 }
1237 
1238 /* Functions and data structures to keep track of the values hard regs
1239    had at the start of the function.  */
1240 
1241 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1242    and has_hard_reg_initial_val.  */
1243 struct GTY(()) initial_value_pair {
1244   rtx hard_reg;
1245   rtx pseudo;
1246 };
1247 /* ???  This could be a VEC but there is currently no way to define an
1248    opaque VEC type.  This could be worked around by defining struct
1249    initial_value_pair in function.h.  */
1250 struct GTY(()) initial_value_struct {
1251   int num_entries;
1252   int max_entries;
1253   initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1254 };
1255 
1256 /* If a pseudo represents an initial hard reg (or expression), return
1257    it, else return NULL_RTX.  */
1258 
1259 rtx
1260 get_hard_reg_initial_reg (rtx reg)
1261 {
1262   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1263   int i;
1264 
1265   if (ivs == 0)
1266     return NULL_RTX;
1267 
1268   for (i = 0; i < ivs->num_entries; i++)
1269     if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1270       return ivs->entries[i].hard_reg;
1271 
1272   return NULL_RTX;
1273 }
1274 
1275 /* Make sure that there's a pseudo register of mode MODE that stores the
1276    initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
1277 
1278 rtx
1279 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1280 {
1281   struct initial_value_struct *ivs;
1282   rtx rv;
1283 
1284   rv = has_hard_reg_initial_val (mode, regno);
1285   if (rv)
1286     return rv;
1287 
1288   ivs = crtl->hard_reg_initial_vals;
1289   if (ivs == 0)
1290     {
1291       ivs = ggc_alloc<initial_value_struct> ();
1292       ivs->num_entries = 0;
1293       ivs->max_entries = 5;
1294       ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1295       crtl->hard_reg_initial_vals = ivs;
1296     }
1297 
1298   if (ivs->num_entries >= ivs->max_entries)
1299     {
1300       ivs->max_entries += 5;
1301       ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1302 				    ivs->max_entries);
1303     }
1304 
1305   ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1306   ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1307 
1308   return ivs->entries[ivs->num_entries++].pseudo;
1309 }
1310 
1311 /* See if get_hard_reg_initial_val has been used to create a pseudo
1312    for the initial value of hard register REGNO in mode MODE.  Return
1313    the associated pseudo if so, otherwise return NULL.  */
1314 
1315 rtx
1316 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1317 {
1318   struct initial_value_struct *ivs;
1319   int i;
1320 
1321   ivs = crtl->hard_reg_initial_vals;
1322   if (ivs != 0)
1323     for (i = 0; i < ivs->num_entries; i++)
1324       if (GET_MODE (ivs->entries[i].hard_reg) == mode
1325 	  && REGNO (ivs->entries[i].hard_reg) == regno)
1326 	return ivs->entries[i].pseudo;
1327 
1328   return NULL_RTX;
1329 }
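
/* For illustration only: a back end that needs the value a hard register had
   on entry (for example when expanding __builtin_return_address) might write
   something like

     rtx lr_on_entry = get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);

   where RETURN_ADDR_REGNUM stands for whatever register number the target
   uses.  emit_initial_value_sets below then copies each recorded hard
   register into its pseudo at the function's entry point.  */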
1330 
1331 unsigned int
1332 emit_initial_value_sets (void)
1333 {
1334   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1335   int i;
1336   rtx_insn *seq;
1337 
1338   if (ivs == 0)
1339     return 0;
1340 
1341   start_sequence ();
1342   for (i = 0; i < ivs->num_entries; i++)
1343     emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1344   seq = get_insns ();
1345   end_sequence ();
1346 
1347   emit_insn_at_entry (seq);
1348   return 0;
1349 }
1350 
1351 /* Return the hardreg-pseudoreg initial values pair entry I and
1352    TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
1353 bool
1354 initial_value_entry (int i, rtx *hreg, rtx *preg)
1355 {
1356   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1357   if (!ivs || i >= ivs->num_entries)
1358     return false;
1359 
1360   *hreg = ivs->entries[i].hard_reg;
1361   *preg = ivs->entries[i].pseudo;
1362   return true;
1363 }
1364 
1365 /* These routines are responsible for converting virtual register references
1366    to the actual hard register references once RTL generation is complete.
1367 
1368    The following four variables are used for communication between the
1369    routines.  They contain the offsets of the virtual registers from their
1370    respective hard registers.  */
1371 
1372 static poly_int64 in_arg_offset;
1373 static poly_int64 var_offset;
1374 static poly_int64 dynamic_offset;
1375 static poly_int64 out_arg_offset;
1376 static poly_int64 cfa_offset;
1377 
1378 /* In most machines, the stack pointer register is equivalent to the bottom
1379    of the stack.  */
1380 
1381 #ifndef STACK_POINTER_OFFSET
1382 #define STACK_POINTER_OFFSET	0
1383 #endif
1384 
1385 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1386 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1387 #endif
1388 
1389 /* If not defined, pick an appropriate default for the offset of dynamically
1390    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1391    INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
1392 
1393 #ifndef STACK_DYNAMIC_OFFSET
1394 
1395 /* The bottom of the stack points to the actual arguments.  If
1396    REG_PARM_STACK_SPACE is defined, this includes the space for the register
1397    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1398    stack space for register parameters is not pushed by the caller, but
1399    rather part of the fixed stack areas and hence not included in
1400    `crtl->outgoing_args_size'.  Nevertheless, we must allow
1401    for it when allocating stack dynamic objects.  */
1402 
1403 #ifdef INCOMING_REG_PARM_STACK_SPACE
1404 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1405 ((ACCUMULATE_OUTGOING_ARGS						      \
1406   ? (crtl->outgoing_args_size				      \
1407      + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1408 					       : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1409   : 0) + (STACK_POINTER_OFFSET))
1410 #else
1411 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1412   ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
1413  + (STACK_POINTER_OFFSET))
1414 #endif
1415 #endif
1416 
1417 
1418 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1419    is a virtual register, return the equivalent hard register and set the
1420    offset indirectly through the pointer.  Otherwise, return 0.  */
1421 
1422 static rtx
1423 instantiate_new_reg (rtx x, poly_int64_pod *poffset)
1424 {
1425   rtx new_rtx;
1426   poly_int64 offset;
1427 
1428   if (x == virtual_incoming_args_rtx)
1429     {
1430       if (stack_realign_drap)
1431         {
1432 	  /* Replace virtual_incoming_args_rtx with internal arg
1433 	     pointer if DRAP is used to realign stack.  */
1434           new_rtx = crtl->args.internal_arg_pointer;
1435           offset = 0;
1436         }
1437       else
1438         new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1439     }
1440   else if (x == virtual_stack_vars_rtx)
1441     new_rtx = frame_pointer_rtx, offset = var_offset;
1442   else if (x == virtual_stack_dynamic_rtx)
1443     new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1444   else if (x == virtual_outgoing_args_rtx)
1445     new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1446   else if (x == virtual_cfa_rtx)
1447     {
1448 #ifdef FRAME_POINTER_CFA_OFFSET
1449       new_rtx = frame_pointer_rtx;
1450 #else
1451       new_rtx = arg_pointer_rtx;
1452 #endif
1453       offset = cfa_offset;
1454     }
1455   else if (x == virtual_preferred_stack_boundary_rtx)
1456     {
1457       new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1458       offset = 0;
1459     }
1460   else
1461     return NULL_RTX;
1462 
1463   *poffset = offset;
1464   return new_rtx;
1465 }
1466 
1467 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1468    registers present inside of *LOC.  The expression is simplified,
1469    as much as possible, but is not to be considered "valid" in any sense
1470    implied by the target.  Return true if any change is made.  */
1471 
1472 static bool
1473 instantiate_virtual_regs_in_rtx (rtx *loc)
1474 {
1475   if (!*loc)
1476     return false;
1477   bool changed = false;
1478   subrtx_ptr_iterator::array_type array;
1479   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1480     {
1481       rtx *loc = *iter;
1482       if (rtx x = *loc)
1483 	{
1484 	  rtx new_rtx;
1485 	  poly_int64 offset;
1486 	  switch (GET_CODE (x))
1487 	    {
1488 	    case REG:
1489 	      new_rtx = instantiate_new_reg (x, &offset);
1490 	      if (new_rtx)
1491 		{
1492 		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1493 		  changed = true;
1494 		}
1495 	      iter.skip_subrtxes ();
1496 	      break;
1497 
1498 	    case PLUS:
1499 	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1500 	      if (new_rtx)
1501 		{
1502 		  XEXP (x, 0) = new_rtx;
1503 		  *loc = plus_constant (GET_MODE (x), x, offset, true);
1504 		  changed = true;
1505 		  iter.skip_subrtxes ();
1506 		  break;
1507 		}
1508 
1509 	      /* FIXME -- from old code */
1510 	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1511 		 we can commute the PLUS and SUBREG because pointers into the
1512 		 frame are well-behaved.  */
1513 	      break;
1514 
1515 	    default:
1516 	      break;
1517 	    }
1518 	}
1519     }
1520   return changed;
1521 }
1522 
1523 /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
1524    matches the predicate for insn CODE operand OPERAND.  */
1525 
1526 static int
1527 safe_insn_predicate (int code, int operand, rtx x)
1528 {
1529   return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1530 }
1531 
1532 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1533    registers present inside of insn.  The result will be a valid insn.  */
1534 
1535 static void
1536 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1537 {
1538   poly_int64 offset;
1539   int insn_code, i;
1540   bool any_change = false;
1541   rtx set, new_rtx, x;
1542   rtx_insn *seq;
1543 
1544   /* There are some special cases to be handled first.  */
1545   set = single_set (insn);
1546   if (set)
1547     {
1548       /* We're allowed to assign to a virtual register.  This is interpreted
1549 	 to mean that the underlying register gets assigned the inverse
1550 	 transformation.  This is used, for example, in the handling of
1551 	 non-local gotos.  */
1552       new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1553       if (new_rtx)
1554 	{
1555 	  start_sequence ();
1556 
1557 	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1558 	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1559 				   gen_int_mode (-offset, GET_MODE (new_rtx)));
1560 	  x = force_operand (x, new_rtx);
1561 	  if (x != new_rtx)
1562 	    emit_move_insn (new_rtx, x);
1563 
1564 	  seq = get_insns ();
1565 	  end_sequence ();
1566 
1567 	  emit_insn_before (seq, insn);
1568 	  delete_insn (insn);
1569 	  return;
1570 	}
1571 
1572       /* Handle a straight copy from a virtual register by generating a
1573 	 new add insn.  The difference between this and falling through
1574 	 to the generic case is avoiding a new pseudo and eliminating a
1575 	 move insn in the initial rtl stream.  */
1576       new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1577       if (new_rtx
1578 	  && maybe_ne (offset, 0)
1579 	  && REG_P (SET_DEST (set))
1580 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1581 	{
1582 	  start_sequence ();
1583 
1584 	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1585 				   gen_int_mode (offset,
1586 						 GET_MODE (SET_DEST (set))),
1587 				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1588 	  if (x != SET_DEST (set))
1589 	    emit_move_insn (SET_DEST (set), x);
1590 
1591 	  seq = get_insns ();
1592 	  end_sequence ();
1593 
1594 	  emit_insn_before (seq, insn);
1595 	  delete_insn (insn);
1596 	  return;
1597 	}
1598 
1599       extract_insn (insn);
1600       insn_code = INSN_CODE (insn);
1601 
1602       /* Handle a plus involving a virtual register by determining if the
1603 	 operands remain valid if they're modified in place.  */
1604       poly_int64 delta;
1605       if (GET_CODE (SET_SRC (set)) == PLUS
1606 	  && recog_data.n_operands >= 3
1607 	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1608 	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1609 	  && poly_int_rtx_p (recog_data.operand[2], &delta)
1610 	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1611 	{
1612 	  offset += delta;
1613 
1614 	  /* If the sum is zero, then replace with a plain move.  */
1615 	  if (known_eq (offset, 0)
1616 	      && REG_P (SET_DEST (set))
1617 	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1618 	    {
1619 	      start_sequence ();
1620 	      emit_move_insn (SET_DEST (set), new_rtx);
1621 	      seq = get_insns ();
1622 	      end_sequence ();
1623 
1624 	      emit_insn_before (seq, insn);
1625 	      delete_insn (insn);
1626 	      return;
1627 	    }
1628 
1629 	  x = gen_int_mode (offset, recog_data.operand_mode[2]);
1630 
1631 	  /* Using validate_change and apply_change_group here leaves
1632 	     recog_data in an invalid state.  Since we know exactly what
1633 	     we want to check, do those two by hand.  */
1634 	  if (safe_insn_predicate (insn_code, 1, new_rtx)
1635 	      && safe_insn_predicate (insn_code, 2, x))
1636 	    {
1637 	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1638 	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1639 	      any_change = true;
1640 
1641 	      /* Fall through into the regular operand fixup loop in
1642 		 order to take care of operands other than 1 and 2.  */
1643 	    }
1644 	}
1645     }
1646   else
1647     {
1648       extract_insn (insn);
1649       insn_code = INSN_CODE (insn);
1650     }
1651 
1652   /* In the general case, we expect virtual registers to appear only in
1653      operands, and then only as either bare registers or inside memories.  */
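  /* For instance, an operand may be (reg virtual-stack-vars) itself or a
     memory reference such as (mem (plus (reg virtual-stack-vars)
     (const_int 12))); both forms are rewritten below in terms of the
     corresponding hard register and offset.  */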
1654   for (i = 0; i < recog_data.n_operands; ++i)
1655     {
1656       x = recog_data.operand[i];
1657       switch (GET_CODE (x))
1658 	{
1659 	case MEM:
1660 	  {
1661 	    rtx addr = XEXP (x, 0);
1662 
1663 	    if (!instantiate_virtual_regs_in_rtx (&addr))
1664 	      continue;
1665 
1666 	    start_sequence ();
1667 	    x = replace_equiv_address (x, addr, true);
1668 	    /* It may happen that the address with the virtual reg
1669 	       was valid (e.g. based on the virtual stack reg, which might
1670 	       be acceptable to the predicates with all offsets), whereas
1671 	       the new address no longer is, for instance when the address
1672 	       still has an offset but its base reg is no longer
1673 	       virtual-stack-reg.  Below we would do a force_reg on the whole operand,
1674 	       but this insn might actually only accept memory.  Hence,
1675 	       before doing that last resort, try to reload the address into
1676 	       a register, so this operand stays a MEM.  */
1677 	    if (!safe_insn_predicate (insn_code, i, x))
1678 	      {
1679 		addr = force_reg (GET_MODE (addr), addr);
1680 		x = replace_equiv_address (x, addr, true);
1681 	      }
1682 	    seq = get_insns ();
1683 	    end_sequence ();
1684 	    if (seq)
1685 	      emit_insn_before (seq, insn);
1686 	  }
1687 	  break;
1688 
1689 	case REG:
1690 	  new_rtx = instantiate_new_reg (x, &offset);
1691 	  if (new_rtx == NULL)
1692 	    continue;
1693 	  if (known_eq (offset, 0))
1694 	    x = new_rtx;
1695 	  else
1696 	    {
1697 	      start_sequence ();
1698 
1699 	      /* Careful, special mode predicates may have stuff in
1700 		 insn_data[insn_code].operand[i].mode that isn't useful
1701 		 to us for computing a new value.  */
1702 	      /* ??? Recognize address_operand and/or "p" constraints
1703 		 to see if (plus new offset) is a valid address before we put
1704 		 this through expand_simple_binop.  */
1705 	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1706 				       gen_int_mode (offset, GET_MODE (x)),
1707 				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
1708 	      seq = get_insns ();
1709 	      end_sequence ();
1710 	      emit_insn_before (seq, insn);
1711 	    }
1712 	  break;
1713 
1714 	case SUBREG:
1715 	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1716 	  if (new_rtx == NULL)
1717 	    continue;
1718 	  if (maybe_ne (offset, 0))
1719 	    {
1720 	      start_sequence ();
1721 	      new_rtx = expand_simple_binop
1722 		(GET_MODE (new_rtx), PLUS, new_rtx,
1723 		 gen_int_mode (offset, GET_MODE (new_rtx)),
1724 		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1725 	      seq = get_insns ();
1726 	      end_sequence ();
1727 	      emit_insn_before (seq, insn);
1728 	    }
1729 	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1730 				   GET_MODE (new_rtx), SUBREG_BYTE (x));
1731 	  gcc_assert (x);
1732 	  break;
1733 
1734 	default:
1735 	  continue;
1736 	}
1737 
1738       /* At this point, X contains the new value for the operand.
1739 	 Validate the new value vs the insn predicate.  Note that
1740 	 asm insns will have insn_code -1 here.  */
1741       if (!safe_insn_predicate (insn_code, i, x))
1742 	{
1743 	  start_sequence ();
1744 	  if (REG_P (x))
1745 	    {
1746 	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1747 	      x = copy_to_reg (x);
1748 	    }
1749 	  else
1750 	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
1751 	  seq = get_insns ();
1752 	  end_sequence ();
1753 	  if (seq)
1754 	    emit_insn_before (seq, insn);
1755 	}
1756 
1757       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1758       any_change = true;
1759     }
1760 
1761   if (any_change)
1762     {
1763       /* Propagate operand changes into the duplicates.  */
1764       for (i = 0; i < recog_data.n_dups; ++i)
1765 	*recog_data.dup_loc[i]
1766 	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1767 
1768       /* Force re-recognition of the instruction for validation.  */
1769       INSN_CODE (insn) = -1;
1770     }
1771 
1772   if (asm_noperands (PATTERN (insn)) >= 0)
1773     {
1774       if (!check_asm_operands (PATTERN (insn)))
1775 	{
1776 	  error_for_asm (insn, "impossible constraint in %<asm%>");
1777 	  /* For asm goto, instead of fixing up all the edges
1778 	     just clear the template and clear input operands
1779 	     (asm goto doesn't have any output operands).  */
1780 	  if (JUMP_P (insn))
1781 	    {
1782 	      rtx asm_op = extract_asm_operands (PATTERN (insn));
1783 	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1784 	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1785 	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1786 	    }
1787 	  else
1788 	    delete_insn (insn);
1789 	}
1790     }
1791   else
1792     {
1793       if (recog_memoized (insn) < 0)
1794 	fatal_insn_not_found (insn);
1795     }
1796 }
1797 
1798 /* Subroutine of instantiate_decls.  Given RTL representing a decl,
1799    do any instantiation required.  */
1800 
1801 void
1802 instantiate_decl_rtl (rtx x)
1803 {
1804   rtx addr;
1805 
1806   if (x == 0)
1807     return;
1808 
1809   /* If this is a CONCAT, recurse for the pieces.  */
1810   if (GET_CODE (x) == CONCAT)
1811     {
1812       instantiate_decl_rtl (XEXP (x, 0));
1813       instantiate_decl_rtl (XEXP (x, 1));
1814       return;
1815     }
1816 
1817   /* If this is not a MEM, no need to do anything.  Similarly if the
1818      address is a constant or a register that is not a virtual register.  */
1819   if (!MEM_P (x))
1820     return;
1821 
1822   addr = XEXP (x, 0);
1823   if (CONSTANT_P (addr)
1824       || (REG_P (addr)
1825 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1826 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1827     return;
1828 
1829   instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1830 }
1831 
1832 /* Helper for instantiate_decls called via walk_tree: Process all decls
1833    in the given DECL_VALUE_EXPR.  */
1834 
1835 static tree
1836 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1837 {
1838   tree t = *tp;
1839   if (! EXPR_P (t))
1840     {
1841       *walk_subtrees = 0;
1842       if (DECL_P (t))
1843 	{
1844 	  if (DECL_RTL_SET_P (t))
1845 	    instantiate_decl_rtl (DECL_RTL (t));
1846 	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1847 	      && DECL_INCOMING_RTL (t))
1848 	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1849 	  if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1850 	      && DECL_HAS_VALUE_EXPR_P (t))
1851 	    {
1852 	      tree v = DECL_VALUE_EXPR (t);
1853 	      walk_tree (&v, instantiate_expr, NULL, NULL);
1854 	    }
1855 	}
1856     }
1857   return NULL;
1858 }
1859 
1860 /* Subroutine of instantiate_decls: Process all decls in the given
1861    BLOCK node and all its subblocks.  */
1862 
1863 static void
1864 instantiate_decls_1 (tree let)
1865 {
1866   tree t;
1867 
1868   for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1869     {
1870       if (DECL_RTL_SET_P (t))
1871 	instantiate_decl_rtl (DECL_RTL (t));
1872       if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1873 	{
1874 	  tree v = DECL_VALUE_EXPR (t);
1875 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1876 	}
1877     }
1878 
1879   /* Process all subblocks.  */
1880   for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1881     instantiate_decls_1 (t);
1882 }
1883 
1884 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1885    all virtual registers in their DECL_RTL's.  */
1886 
1887 static void
1888 instantiate_decls (tree fndecl)
1889 {
1890   tree decl;
1891   unsigned ix;
1892 
1893   /* Process all parameters of the function.  */
1894   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1895     {
1896       instantiate_decl_rtl (DECL_RTL (decl));
1897       instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1898       if (DECL_HAS_VALUE_EXPR_P (decl))
1899 	{
1900 	  tree v = DECL_VALUE_EXPR (decl);
1901 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1902 	}
1903     }
1904 
1905   if ((decl = DECL_RESULT (fndecl))
1906       && TREE_CODE (decl) == RESULT_DECL)
1907     {
1908       if (DECL_RTL_SET_P (decl))
1909 	instantiate_decl_rtl (DECL_RTL (decl));
1910       if (DECL_HAS_VALUE_EXPR_P (decl))
1911 	{
1912 	  tree v = DECL_VALUE_EXPR (decl);
1913 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1914 	}
1915     }
1916 
1917   /* Process the saved static chain if it exists.  */
1918   decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1919   if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1920     instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1921 
1922   /* Now process all variables defined in the function or its subblocks.  */
1923   if (DECL_INITIAL (fndecl))
1924     instantiate_decls_1 (DECL_INITIAL (fndecl));
1925 
1926   FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1927     if (DECL_RTL_SET_P (decl))
1928       instantiate_decl_rtl (DECL_RTL (decl));
1929   vec_free (cfun->local_decls);
1930 }
1931 
1932 /* Pass through the INSNS of function FNDECL and convert virtual register
1933    references to hard register references.  */
1934 
1935 static unsigned int
1936 instantiate_virtual_regs (void)
1937 {
1938   rtx_insn *insn;
1939 
1940   /* Compute the offsets to use for this function.  */
1941   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1942   var_offset = targetm.starting_frame_offset ();
1943   dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1944   out_arg_offset = STACK_POINTER_OFFSET;
1945 #ifdef FRAME_POINTER_CFA_OFFSET
1946   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1947 #else
1948   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1949 #endif
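  /* Each virtual register now has a (hard register, offset) replacement:
     for instance virtual-incoming-args maps to the arg pointer plus
     in_arg_offset, and virtual-stack-vars to the frame pointer plus
     var_offset.  The walk below substitutes these throughout the insns.  */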
1950 
1951   /* Initialize recognition, indicating that volatile is OK.  */
1952   init_recog ();
1953 
1954   /* Scan through all the insns, instantiating every virtual register still
1955      present.  */
1956   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1957     if (INSN_P (insn))
1958       {
1959 	/* These patterns in the instruction stream can never be recognized.
1960 	   Fortunately, they shouldn't contain virtual registers either.  */
1961         if (GET_CODE (PATTERN (insn)) == USE
1962 	    || GET_CODE (PATTERN (insn)) == CLOBBER
1963 	    || GET_CODE (PATTERN (insn)) == ASM_INPUT
1964 	    || DEBUG_MARKER_INSN_P (insn))
1965 	  continue;
1966 	else if (DEBUG_BIND_INSN_P (insn))
1967 	  instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1968 	else
1969 	  instantiate_virtual_regs_in_insn (insn);
1970 
1971 	if (insn->deleted ())
1972 	  continue;
1973 
1974 	instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1975 
1976 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
1977 	if (CALL_P (insn))
1978 	  instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1979       }
1980 
1981   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
1982   instantiate_decls (current_function_decl);
1983 
1984   targetm.instantiate_decls ();
1985 
1986   /* Indicate that, from now on, assign_stack_local should use
1987      frame_pointer_rtx.  */
1988   virtuals_instantiated = 1;
1989 
1990   return 0;
1991 }
1992 
1993 namespace {
1994 
1995 const pass_data pass_data_instantiate_virtual_regs =
1996 {
1997   RTL_PASS, /* type */
1998   "vregs", /* name */
1999   OPTGROUP_NONE, /* optinfo_flags */
2000   TV_NONE, /* tv_id */
2001   0, /* properties_required */
2002   0, /* properties_provided */
2003   0, /* properties_destroyed */
2004   0, /* todo_flags_start */
2005   0, /* todo_flags_finish */
2006 };
2007 
2008 class pass_instantiate_virtual_regs : public rtl_opt_pass
2009 {
2010 public:
2011   pass_instantiate_virtual_regs (gcc::context *ctxt)
2012     : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2013   {}
2014 
2015   /* opt_pass methods: */
2016   virtual unsigned int execute (function *)
2017     {
2018       return instantiate_virtual_regs ();
2019     }
2020 
2021 }; // class pass_instantiate_virtual_regs
2022 
2023 } // anon namespace
2024 
2025 rtl_opt_pass *
2026 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2027 {
2028   return new pass_instantiate_virtual_regs (ctxt);
2029 }
2030 
2031 
2032 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2033    This means a type for which function calls must pass an address to the
2034    function or get an address back from the function.
2035    EXP may be a type node or an expression (whose type is tested).  */
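/* For example, a plain scalar such as 'int' normally yields 0 here, whereas
   a type with TREE_ADDRESSABLE set (e.g. a C++ class with a nontrivial copy
   constructor or destructor) or one rejected by the target's
   return_in_memory hook yields 1.  */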
2036 
2037 int
2038 aggregate_value_p (const_tree exp, const_tree fntype)
2039 {
2040   const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2041   int i, regno, nregs;
2042   rtx reg;
2043 
2044   if (fntype)
2045     switch (TREE_CODE (fntype))
2046       {
2047       case CALL_EXPR:
2048 	{
2049 	  tree fndecl = get_callee_fndecl (fntype);
2050 	  if (fndecl)
2051 	    fntype = TREE_TYPE (fndecl);
2052 	  else if (CALL_EXPR_FN (fntype))
2053 	    fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2054 	  else
2055 	    /* For internal functions, assume nothing needs to be
2056 	       returned in memory.  */
2057 	    return 0;
2058 	}
2059 	break;
2060       case FUNCTION_DECL:
2061 	fntype = TREE_TYPE (fntype);
2062 	break;
2063       case FUNCTION_TYPE:
2064       case METHOD_TYPE:
2065         break;
2066       case IDENTIFIER_NODE:
2067 	fntype = NULL_TREE;
2068 	break;
2069       default:
2070 	/* We don't expect other tree types here.  */
2071 	gcc_unreachable ();
2072       }
2073 
2074   if (VOID_TYPE_P (type))
2075     return 0;
2076 
2077   /* If a record should be passed the same as its first (and only) member,
2078      don't pass it as an aggregate.  */
2079   if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2080     return aggregate_value_p (first_field (type), fntype);
2081 
2082   /* If the front end has decided that this needs to be passed by
2083      reference, do so.  */
2084   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2085       && DECL_BY_REFERENCE (exp))
2086     return 1;
2087 
2088   /* Function types that are TREE_ADDRESSABLE force return in memory.  */
2089   if (fntype && TREE_ADDRESSABLE (fntype))
2090     return 1;
2091 
2092   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2093      and thus can't be returned in registers.  */
2094   if (TREE_ADDRESSABLE (type))
2095     return 1;
2096 
2097   if (TYPE_EMPTY_P (type))
2098     return 0;
2099 
2100   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2101     return 1;
2102 
2103   if (targetm.calls.return_in_memory (type, fntype))
2104     return 1;
2105 
2106   /* Make sure we have suitable call-clobbered regs to return
2107      the value in; if not, we must return it in memory.  */
2108   reg = hard_function_value (type, 0, fntype, 0);
2109 
2110   /* If we have something other than a REG (e.g. a PARALLEL), then assume
2111      it is OK.  */
2112   if (!REG_P (reg))
2113     return 0;
2114 
2115   regno = REGNO (reg);
2116   nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2117   for (i = 0; i < nregs; i++)
2118     if (! call_used_regs[regno + i])
2119       return 1;
2120 
2121   return 0;
2122 }
2123 
2124 /* Return true if we should assign DECL a pseudo register; false if it
2125    should live on the local stack.  */
2126 
2127 bool
2128 use_register_for_decl (const_tree decl)
2129 {
2130   if (TREE_CODE (decl) == SSA_NAME)
2131     {
2132       /* We often try to use the SSA_NAME, instead of its underlying
2133 	 decl, to get type information and guide decisions, to avoid
2134 	 differences of behavior between anonymous and named
2135 	 variables, but in this one case we have to go for the actual
2136 	 variable if there is one.  The main reason is that, at least
2137 	 at -O0, we want to place user variables on the stack, but we
2138 	 don't mind using pseudos for anonymous or ignored temps.
2139 	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2140 	 should go in pseudos, whereas their corresponding variables
2141 	 might have to go on the stack.  So, disregarding the decl
2142 	 here would negatively impact debug info at -O0, enable
2143 	 coalescing between SSA_NAMEs that ought to get different
2144 	 stack/pseudo assignments, and get the incoming argument
2145 	 processing thoroughly confused by PARM_DECLs expected to live
2146 	 in stack slots but assigned to pseudos.  */
2147       if (!SSA_NAME_VAR (decl))
2148 	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2149 	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2150 
2151       decl = SSA_NAME_VAR (decl);
2152     }
2153 
2154   /* Honor volatile.  */
2155   if (TREE_SIDE_EFFECTS (decl))
2156     return false;
2157 
2158   /* Honor addressability.  */
2159   if (TREE_ADDRESSABLE (decl))
2160     return false;
2161 
2162   /* RESULT_DECLs are a bit special in that they're assigned without
2163      regard to use_register_for_decl, but we generally only store in
2164      them.  If we coalesce their SSA NAMEs, we'd better return a
2165      result that matches the assignment in expand_function_start.  */
2166   if (TREE_CODE (decl) == RESULT_DECL)
2167     {
2168       /* If it's not an aggregate, we're going to use a REG or a
2169 	 PARALLEL containing a REG.  */
2170       if (!aggregate_value_p (decl, current_function_decl))
2171 	return true;
2172 
2173       /* If expand_function_start determines the return value, we'll
2174 	 use MEM if it's not by reference.  */
2175       if (cfun->returns_pcc_struct
2176 	  || (targetm.calls.struct_value_rtx
2177 	      (TREE_TYPE (current_function_decl), 1)))
2178 	return DECL_BY_REFERENCE (decl);
2179 
2180       /* Otherwise, we're taking an extra all.function_result_decl
2181 	 argument.  It's set up in assign_parms_augmented_arg_list,
2182 	 under the (negated) conditions above, and then it's used to
2183 	 set up the RESULT_DECL rtl in assign_parms, after looping
2184 	 over all parameters.  Now, if the RESULT_DECL is not by
2185 	 reference, we'll use a MEM either way.  */
2186       if (!DECL_BY_REFERENCE (decl))
2187 	return false;
2188 
2189       /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2190 	 the function_result_decl's assignment.  Since it's a pointer,
2191 	 we can short-circuit a number of the tests below, and we must
2192 	 duplicate them because we don't have the
2193 	 function_result_decl to test.  */
2194       if (!targetm.calls.allocate_stack_slots_for_args ())
2195 	return true;
2196       /* We don't set DECL_IGNORED_P for the function_result_decl.  */
2197       if (optimize)
2198 	return true;
2199       /* We don't set DECL_REGISTER for the function_result_decl.  */
2200       return false;
2201     }
2202 
2203   /* Decl is implicitly addressable by bound stores and loads
2204      if it is an aggregate holding bounds.  */
2205   if (chkp_function_instrumented_p (current_function_decl)
2206       && TREE_TYPE (decl)
2207       && !BOUNDED_P (decl)
2208       && chkp_type_has_pointer (TREE_TYPE (decl)))
2209     return false;
2210 
2211   /* Only register-like things go in registers.  */
2212   if (DECL_MODE (decl) == BLKmode)
2213     return false;
2214 
2215   /* If -ffloat-store specified, don't put explicit float variables
2216      into registers.  */
2217   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2218      propagates values across these stores, and it probably shouldn't.  */
2219   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2220     return false;
2221 
2222   if (!targetm.calls.allocate_stack_slots_for_args ())
2223     return true;
2224 
2225   /* If we're not interested in tracking debugging information for
2226      this decl, then we can certainly put it in a register.  */
2227   if (DECL_IGNORED_P (decl))
2228     return true;
2229 
2230   if (optimize)
2231     return true;
2232 
2233   if (!DECL_REGISTER (decl))
2234     return false;
2235 
2236   /* When not optimizing, disregard register keyword for types that
2237      could have methods, otherwise the methods won't be callable from
2238      the debugger.  */
2239   if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2240     return false;
2241 
2242   return true;
2243 }
2244 
2245 /* Structures to communicate between the subroutines of assign_parms.
2246    The first holds data persistent across all parameters, the second
2247    is cleared out for each parameter.  */
2248 
2249 struct assign_parm_data_all
2250 {
2251   /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2252      should become a job of the target or otherwise encapsulated.  */
2253   CUMULATIVE_ARGS args_so_far_v;
2254   cumulative_args_t args_so_far;
2255   struct args_size stack_args_size;
2256   tree function_result_decl;
2257   tree orig_fnargs;
2258   rtx_insn *first_conversion_insn;
2259   rtx_insn *last_conversion_insn;
2260   HOST_WIDE_INT pretend_args_size;
2261   HOST_WIDE_INT extra_pretend_bytes;
2262   int reg_parm_stack_space;
2263 };
2264 
2265 struct assign_parm_data_one
2266 {
2267   tree nominal_type;
2268   tree passed_type;
2269   rtx entry_parm;
2270   rtx stack_parm;
2271   machine_mode nominal_mode;
2272   machine_mode passed_mode;
2273   machine_mode promoted_mode;
2274   struct locate_and_pad_arg_data locate;
2275   int partial;
2276   BOOL_BITFIELD named_arg : 1;
2277   BOOL_BITFIELD passed_pointer : 1;
2278   BOOL_BITFIELD on_stack : 1;
2279   BOOL_BITFIELD loaded_in_reg : 1;
2280 };
2281 
2282 struct bounds_parm_data
2283 {
2284   assign_parm_data_one parm_data;
2285   tree bounds_parm;
2286   tree ptr_parm;
2287   rtx ptr_entry;
2288   int bound_no;
2289 };
2290 
2291 /* A subroutine of assign_parms.  Initialize ALL.  */
2292 
2293 static void
2294 assign_parms_initialize_all (struct assign_parm_data_all *all)
2295 {
2296   tree fntype ATTRIBUTE_UNUSED;
2297 
2298   memset (all, 0, sizeof (*all));
2299 
2300   fntype = TREE_TYPE (current_function_decl);
2301 
2302 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2303   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2304 #else
2305   INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2306 			current_function_decl, -1);
2307 #endif
2308   all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2309 
2310 #ifdef INCOMING_REG_PARM_STACK_SPACE
2311   all->reg_parm_stack_space
2312     = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2313 #endif
2314 }
2315 
2316 /* If ARGS contains entries with complex types, split each such entry
2317    into two entries of the component type.  The vector ARGS is updated
2318    in place.  */
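/* For instance, a PARM_DECL of type _Complex double is replaced by two
   consecutive double PARM_DECLs, one for the real part and one for the
   imaginary part, when the target's split_complex_arg hook asks for it.  */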
2319 
2320 static void
2321 split_complex_args (vec<tree> *args)
2322 {
2323   unsigned i;
2324   tree p;
2325 
2326   FOR_EACH_VEC_ELT (*args, i, p)
2327     {
2328       tree type = TREE_TYPE (p);
2329       if (TREE_CODE (type) == COMPLEX_TYPE
2330 	  && targetm.calls.split_complex_arg (type))
2331 	{
2332 	  tree decl;
2333 	  tree subtype = TREE_TYPE (type);
2334 	  bool addressable = TREE_ADDRESSABLE (p);
2335 
2336 	  /* Rewrite the PARM_DECL's type with its component.  */
2337 	  p = copy_node (p);
2338 	  TREE_TYPE (p) = subtype;
2339 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2340 	  SET_DECL_MODE (p, VOIDmode);
2341 	  DECL_SIZE (p) = NULL;
2342 	  DECL_SIZE_UNIT (p) = NULL;
2343 	  /* If this arg must go in memory, put it in a pseudo here.
2344 	     We can't allow it to go in memory as per normal parms,
2345 	     because the usual place might not have the imag part
2346 	     adjacent to the real part.  */
2347 	  DECL_ARTIFICIAL (p) = addressable;
2348 	  DECL_IGNORED_P (p) = addressable;
2349 	  TREE_ADDRESSABLE (p) = 0;
2350 	  layout_decl (p, 0);
2351 	  (*args)[i] = p;
2352 
2353 	  /* Build a second synthetic decl.  */
2354 	  decl = build_decl (EXPR_LOCATION (p),
2355 			     PARM_DECL, NULL_TREE, subtype);
2356 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2357 	  DECL_ARTIFICIAL (decl) = addressable;
2358 	  DECL_IGNORED_P (decl) = addressable;
2359 	  layout_decl (decl, 0);
2360 	  args->safe_insert (++i, decl);
2361 	}
2362     }
2363 }
2364 
2365 /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
2366    the hidden struct return argument, and (abi willing) complex args.
2367    Return the new parameter list.  */
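/* For example, for a function that returns a large structure in memory,
     struct S f (int x);
   the augmented list is conceptually (.result_ptr, x), where .result_ptr is
   the synthetic PARM_DECL of type 'struct S *' built below.  */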
2368 
2369 static vec<tree>
2370 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2371 {
2372   tree fndecl = current_function_decl;
2373   tree fntype = TREE_TYPE (fndecl);
2374   vec<tree> fnargs = vNULL;
2375   tree arg;
2376 
2377   for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2378     fnargs.safe_push (arg);
2379 
2380   all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2381 
2382   /* If struct value address is treated as the first argument, make it so.  */
2383   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2384       && ! cfun->returns_pcc_struct
2385       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2386     {
2387       tree type = build_pointer_type (TREE_TYPE (fntype));
2388       tree decl;
2389 
2390       decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2391 			 PARM_DECL, get_identifier (".result_ptr"), type);
2392       DECL_ARG_TYPE (decl) = type;
2393       DECL_ARTIFICIAL (decl) = 1;
2394       DECL_NAMELESS (decl) = 1;
2395       TREE_CONSTANT (decl) = 1;
2396       /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
2397 	 changes, the end of the RESULT_DECL handling block in
2398 	 use_register_for_decl must be adjusted to match.  */
2399 
2400       DECL_CHAIN (decl) = all->orig_fnargs;
2401       all->orig_fnargs = decl;
2402       fnargs.safe_insert (0, decl);
2403 
2404       all->function_result_decl = decl;
2405 
2406       /* If function is instrumented then bounds of the
2407 	 passed structure address is the second argument.  */
2408       if (chkp_function_instrumented_p (fndecl))
2409 	{
2410 	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2411 			     PARM_DECL, get_identifier (".result_bnd"),
2412 			     pointer_bounds_type_node);
2413 	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2414 	  DECL_ARTIFICIAL (decl) = 1;
2415 	  DECL_NAMELESS (decl) = 1;
2416 	  TREE_CONSTANT (decl) = 1;
2417 
2418 	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2419 	  DECL_CHAIN (all->orig_fnargs) = decl;
2420 	  fnargs.safe_insert (1, decl);
2421 	}
2422     }
2423 
2424   /* If the target wants to split complex arguments into scalars, do so.  */
2425   if (targetm.calls.split_complex_arg)
2426     split_complex_args (&fnargs);
2427 
2428   return fnargs;
2429 }
2430 
2431 /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2432    data for the parameter.  Incorporate ABI specifics such as pass-by-
2433    reference and type promotion.  */
2434 
2435 static void
2436 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2437 			     struct assign_parm_data_one *data)
2438 {
2439   tree nominal_type, passed_type;
2440   machine_mode nominal_mode, passed_mode, promoted_mode;
2441   int unsignedp;
2442 
2443   memset (data, 0, sizeof (*data));
2444 
2445   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
2446   if (!cfun->stdarg)
2447     data->named_arg = 1;  /* No variadic parms.  */
2448   else if (DECL_CHAIN (parm))
2449     data->named_arg = 1;  /* Not the last non-variadic parm. */
2450   else if (targetm.calls.strict_argument_naming (all->args_so_far))
2451     data->named_arg = 1;  /* Only variadic ones are unnamed.  */
2452   else
2453     data->named_arg = 0;  /* Treat as variadic.  */
2454 
2455   nominal_type = TREE_TYPE (parm);
2456   passed_type = DECL_ARG_TYPE (parm);
2457 
2458   /* Look out for errors propagating this far.  Also, if the parameter's
2459      type is void then its value doesn't matter.  */
2460   if (TREE_TYPE (parm) == error_mark_node
2461       /* This can happen after weird syntax errors
2462 	 or if an enum type is defined among the parms.  */
2463       || TREE_CODE (parm) != PARM_DECL
2464       || passed_type == NULL
2465       || VOID_TYPE_P (nominal_type))
2466     {
2467       nominal_type = passed_type = void_type_node;
2468       nominal_mode = passed_mode = promoted_mode = VOIDmode;
2469       goto egress;
2470     }
2471 
2472   /* Find mode of arg as it is passed, and mode of arg as it should be
2473      during execution of this function.  */
2474   passed_mode = TYPE_MODE (passed_type);
2475   nominal_mode = TYPE_MODE (nominal_type);
2476 
2477   /* If the parm is to be passed as a transparent union or record, use the
2478      type of the first field for the tests below.  We have already verified
2479      that the modes are the same.  */
2480   if (RECORD_OR_UNION_TYPE_P (passed_type)
2481       && TYPE_TRANSPARENT_AGGR (passed_type))
2482     passed_type = TREE_TYPE (first_field (passed_type));
2483 
2484   /* See if this arg was passed by invisible reference.  */
2485   if (pass_by_reference (&all->args_so_far_v, passed_mode,
2486 			 passed_type, data->named_arg))
2487     {
2488       passed_type = nominal_type = build_pointer_type (passed_type);
2489       data->passed_pointer = true;
2490       passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2491     }
2492 
2493   /* Find mode as it is passed by the ABI.  */
2494   unsignedp = TYPE_UNSIGNED (passed_type);
2495   promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2496 				         TREE_TYPE (current_function_decl), 0);
2497 
2498  egress:
2499   data->nominal_type = nominal_type;
2500   data->passed_type = passed_type;
2501   data->nominal_mode = nominal_mode;
2502   data->passed_mode = passed_mode;
2503   data->promoted_mode = promoted_mode;
2504 }
2505 
2506 /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2507 
2508 static void
2509 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2510 			    struct assign_parm_data_one *data, bool no_rtl)
2511 {
2512   int varargs_pretend_bytes = 0;
2513 
2514   targetm.calls.setup_incoming_varargs (all->args_so_far,
2515 					data->promoted_mode,
2516 					data->passed_type,
2517 					&varargs_pretend_bytes, no_rtl);
2518 
2519   /* If the back-end has requested extra stack space, record how much is
2520      needed.  Do not change pretend_args_size otherwise since it may be
2521      nonzero from an earlier partial argument.  */
2522   if (varargs_pretend_bytes > 0)
2523     all->pretend_args_size = varargs_pretend_bytes;
2524 }
2525 
2526 /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2527    the incoming location of the current parameter.  */
2528 
2529 static void
2530 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2531 			    struct assign_parm_data_one *data)
2532 {
2533   HOST_WIDE_INT pretend_bytes = 0;
2534   rtx entry_parm;
2535   bool in_regs;
2536 
2537   if (data->promoted_mode == VOIDmode)
2538     {
2539       data->entry_parm = data->stack_parm = const0_rtx;
2540       return;
2541     }
2542 
2543   targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2544 					    data->passed_type);
2545 
2546   entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2547 						    data->promoted_mode,
2548 						    data->passed_type,
2549 						    data->named_arg);
2550 
2551   if (entry_parm == 0)
2552     data->promoted_mode = data->passed_mode;
2553 
2554   /* Determine parm's home in the stack, in case it arrives in the stack
2555      or we should pretend it did.  Compute the stack position and rtx where
2556      the argument arrives and its size.
2557 
2558      There is one complexity here:  If this was a parameter that would
2559      have been passed in registers, but wasn't only because it is
2560      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2561      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2562      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2563      as it was the previous time.  */
2564   in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2565 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2566   in_regs = true;
2567 #endif
2568   if (!in_regs && !data->named_arg)
2569     {
2570       if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2571 	{
2572 	  rtx tem;
2573 	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
2574 						     data->promoted_mode,
2575 						     data->passed_type, true);
2576 	  in_regs = tem != NULL;
2577 	}
2578     }
2579 
2580   /* If this parameter was passed both in registers and in the stack, use
2581      the copy on the stack.  */
2582   if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2583 					data->passed_type))
2584     entry_parm = 0;
2585 
2586   if (entry_parm)
2587     {
2588       int partial;
2589 
2590       partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2591 						 data->promoted_mode,
2592 						 data->passed_type,
2593 						 data->named_arg);
2594       data->partial = partial;
2595 
2596       /* The caller might already have allocated stack space for the
2597 	 register parameters.  */
2598       if (partial != 0 && all->reg_parm_stack_space == 0)
2599 	{
2600 	  /* Part of this argument is passed in registers and part
2601 	     is passed on the stack.  Ask the prologue code to extend
2602 	     the stack part so that we can recreate the full value.
2603 
2604 	     PRETEND_BYTES is the size of the registers we need to store.
2605 	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2606 	     stack space that the prologue should allocate.
2607 
2608 	     Internally, gcc assumes that the argument pointer is aligned
2609 	     to STACK_BOUNDARY bits.  This is used both for alignment
2610 	     optimizations (see init_emit) and to locate arguments that are
2611 	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
2612 	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2613 	     a stack boundary.  */
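	  /* Illustrative example: if the first 6 bytes of an argument
	     arrive in registers and STACK_BYTES is 4, PRETEND_BYTES is 6
	     and pretend_args_size becomes CEIL_ROUND (6, 4) = 8, i.e. the
	     prologue reserves 8 extra bytes so the register part can be
	     stored contiguously with the stack part.  */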
2614 
2615 	  /* We assume at most one partial arg, and it must be the first
2616 	     argument on the stack.  */
2617 	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2618 
2619 	  pretend_bytes = partial;
2620 	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2621 
2622 	  /* We want to align relative to the actual stack pointer, so
2623 	     don't include this in the stack size until later.  */
2624 	  all->extra_pretend_bytes = all->pretend_args_size;
2625 	}
2626     }
2627 
2628   locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2629 		       all->reg_parm_stack_space,
2630 		       entry_parm ? data->partial : 0, current_function_decl,
2631 		       &all->stack_args_size, &data->locate);
2632 
2633   /* Update parm_stack_boundary if this parameter is passed in the
2634      stack.  */
2635   if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2636     crtl->parm_stack_boundary = data->locate.boundary;
2637 
2638   /* Adjust offsets to include the pretend args.  */
2639   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2640   data->locate.slot_offset.constant += pretend_bytes;
2641   data->locate.offset.constant += pretend_bytes;
2642 
2643   data->entry_parm = entry_parm;
2644 }
2645 
2646 /* A subroutine of assign_parms.  If there is actually space on the stack
2647    for this parm, count it in stack_args_size and return true.  */
2648 
2649 static bool
2650 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2651 			   struct assign_parm_data_one *data)
2652 {
2653   /* Bounds are never passed on the stack to keep compatibility
2654      with not instrumented code.  */
2655   if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2656     return false;
2657   /* Trivially true if we've no incoming register.  */
2658   else if (data->entry_parm == NULL)
2659     ;
2660   /* Also true if we're partially in registers and partially not,
2661      since we've arranged to drop the entire argument on the stack.  */
2662   else if (data->partial != 0)
2663     ;
2664   /* Also true if the target says that it's passed in both registers
2665      and on the stack.  */
2666   else if (GET_CODE (data->entry_parm) == PARALLEL
2667 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2668     ;
2669   /* Also true if the target says that there's stack allocated for
2670      all register parameters.  */
2671   else if (all->reg_parm_stack_space > 0)
2672     ;
2673   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2674   else
2675     return false;
2676 
2677   all->stack_args_size.constant += data->locate.size.constant;
2678   if (data->locate.size.var)
2679     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2680 
2681   return true;
2682 }
2683 
2684 /* A subroutine of assign_parms.  Given that this parameter is allocated
2685    stack space by the ABI, find it.  */
2686 
2687 static void
2688 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2689 {
2690   rtx offset_rtx, stack_parm;
2691   unsigned int align, boundary;
2692 
2693   /* If we're passing this arg using a reg, make its stack home the
2694      aligned stack slot.  */
2695   if (data->entry_parm)
2696     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2697   else
2698     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2699 
2700   stack_parm = crtl->args.internal_arg_pointer;
2701   if (offset_rtx != const0_rtx)
2702     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2703   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2704 
2705   if (!data->passed_pointer)
2706     {
2707       set_mem_attributes (stack_parm, parm, 1);
2708       /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2709 	 while promoted mode's size is needed.  */
2710       if (data->promoted_mode != BLKmode
2711 	  && data->promoted_mode != DECL_MODE (parm))
2712 	{
2713 	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2714 	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2715 	    {
2716 	      poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2717 							 data->promoted_mode);
2718 	      if (maybe_ne (offset, 0))
2719 		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2720 	    }
2721 	}
2722     }
2723 
2724   boundary = data->locate.boundary;
2725   align = BITS_PER_UNIT;
2726 
2727   /* If we're padding upward, we know that the alignment of the slot
2728      is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2729      intentionally forcing upward padding.  Otherwise we have to come
2730      up with a guess at the alignment based on OFFSET_RTX.  */
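  /* For example, with BOUNDARY == 64 and a constant OFFSET_RTX of 4 bytes,
     least_bit_hwi (64) is 64 and the offset's known alignment is 32 bits,
     so ALIGN ends up as MIN (64, 32) = 32.  */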
2731   poly_int64 offset;
2732   if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm)
2733     align = boundary;
2734   else if (poly_int_rtx_p (offset_rtx, &offset))
2735     {
2736       align = least_bit_hwi (boundary);
2737       unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2738       if (offset_align != 0)
2739 	align = MIN (align, offset_align);
2740     }
2741   set_mem_align (stack_parm, align);
2742 
2743   if (data->entry_parm)
2744     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2745 
2746   data->stack_parm = stack_parm;
2747 }
2748 
2749 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
2750    always valid and contiguous.  */
2751 
2752 static void
2753 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2754 {
2755   rtx entry_parm = data->entry_parm;
2756   rtx stack_parm = data->stack_parm;
2757 
2758   /* If this parm was passed part in regs and part in memory, pretend it
2759      arrived entirely in memory by pushing the register-part onto the stack.
2760      In the special case of a DImode or DFmode that is split, we could put
2761      it together in a pseudoreg directly, but for now that's not worth
2762      bothering with.  */
2763   if (data->partial != 0)
2764     {
2765       /* Handle calls that pass values in multiple non-contiguous
2766 	 locations.  The Irix 6 ABI has examples of this.  */
2767       if (GET_CODE (entry_parm) == PARALLEL)
2768 	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2769 			  data->passed_type,
2770 			  int_size_in_bytes (data->passed_type));
2771       else
2772 	{
2773 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
2774 	  move_block_from_reg (REGNO (entry_parm),
2775 			       validize_mem (copy_rtx (stack_parm)),
2776 			       data->partial / UNITS_PER_WORD);
2777 	}
2778 
2779       entry_parm = stack_parm;
2780     }
2781 
2782   /* If we didn't decide this parm came in a register, by default it came
2783      on the stack.  */
2784   else if (entry_parm == NULL)
2785     entry_parm = stack_parm;
2786 
2787   /* When an argument is passed in multiple locations, we can't make use
2788      of this information, but we can save some copying if the whole argument
2789      is passed in a single register.  */
2790   else if (GET_CODE (entry_parm) == PARALLEL
2791 	   && data->nominal_mode != BLKmode
2792 	   && data->passed_mode != BLKmode)
2793     {
2794       size_t i, len = XVECLEN (entry_parm, 0);
2795 
2796       for (i = 0; i < len; i++)
2797 	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2798 	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2799 	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2800 		== data->passed_mode)
2801 	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2802 	  {
2803 	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2804 	    break;
2805 	  }
2806     }
2807 
2808   data->entry_parm = entry_parm;
2809 }
2810 
2811 /* A subroutine of assign_parms.  Reconstitute any values which were
2812    passed in multiple registers and would fit in a single register.  */
2813 
2814 static void
2815 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2816 {
2817   rtx entry_parm = data->entry_parm;
2818 
2819   /* Convert the PARALLEL to a REG of the same mode as the parallel.
2820      This can be done with register operations rather than on the
2821      stack, even if we will store the reconstituted parameter on the
2822      stack later.  */
2823   if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2824     {
2825       rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2826       emit_group_store (parmreg, entry_parm, data->passed_type,
2827 			GET_MODE_SIZE (GET_MODE (entry_parm)));
2828       entry_parm = parmreg;
2829     }
2830 
2831   data->entry_parm = entry_parm;
2832 }
2833 
2834 /* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
2835    always valid and properly aligned.  */
2836 
2837 static void
2838 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2839 {
2840   rtx stack_parm = data->stack_parm;
2841 
2842   /* If we can't trust the parm stack slot to be aligned enough for its
2843      ultimate type, don't use that slot after entry.  We'll make another
2844      stack slot, if we need one.  */
2845   if (stack_parm
2846       && ((STRICT_ALIGNMENT
2847 	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2848 	  || (data->nominal_type
2849 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2850 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2851     stack_parm = NULL;
2852 
2853   /* If parm was passed in memory, and we need to convert it on entry,
2854      don't store it back in that same slot.  */
2855   else if (data->entry_parm == stack_parm
2856 	   && data->nominal_mode != BLKmode
2857 	   && data->nominal_mode != data->passed_mode)
2858     stack_parm = NULL;
2859 
2860   /* If stack protection is in effect for this function, don't leave any
2861      pointers in their passed stack slots.  */
2862   else if (crtl->stack_protect_guard
2863 	   && (flag_stack_protect == 2
2864 	       || data->passed_pointer
2865 	       || POINTER_TYPE_P (data->nominal_type)))
2866     stack_parm = NULL;
2867 
2868   data->stack_parm = stack_parm;
2869 }
2870 
2871 /* A subroutine of assign_parms.  Return true if the current parameter
2872    should be stored as a BLKmode in the current frame.  */
2873 
2874 static bool
2875 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2876 {
2877   if (data->nominal_mode == BLKmode)
2878     return true;
2879   if (GET_MODE (data->entry_parm) == BLKmode)
2880     return true;
2881 
2882 #ifdef BLOCK_REG_PADDING
2883   /* Only assign_parm_setup_block knows how to deal with register arguments
2884      that are padded at the least significant end.  */
2885   if (REG_P (data->entry_parm)
2886       && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD)
2887       && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2888 	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2889     return true;
2890 #endif
2891 
2892   return false;
2893 }
2894 
2895 /* A subroutine of assign_parms.  Arrange for the parameter to be
2896    present and valid in DATA->STACK_RTL.  */
2897 
2898 static void
2899 assign_parm_setup_block (struct assign_parm_data_all *all,
2900 			 tree parm, struct assign_parm_data_one *data)
2901 {
2902   rtx entry_parm = data->entry_parm;
2903   rtx stack_parm = data->stack_parm;
2904   rtx target_reg = NULL_RTX;
2905   bool in_conversion_seq = false;
2906   HOST_WIDE_INT size;
2907   HOST_WIDE_INT size_stored;
2908 
2909   if (GET_CODE (entry_parm) == PARALLEL)
2910     entry_parm = emit_group_move_into_temps (entry_parm);
2911 
2912   /* If we want the parameter in a pseudo, don't use a stack slot.  */
2913   if (is_gimple_reg (parm) && use_register_for_decl (parm))
2914     {
2915       tree def = ssa_default_def (cfun, parm);
2916       gcc_assert (def);
2917       machine_mode mode = promote_ssa_mode (def, NULL);
2918       rtx reg = gen_reg_rtx (mode);
2919       if (GET_CODE (reg) != CONCAT)
2920 	stack_parm = reg;
2921       else
2922 	{
2923 	  target_reg = reg;
2924 	  /* Avoid allocating a stack slot, if there isn't one
2925 	     preallocated by the ABI.  It might seem like we should
2926 	     always prefer a pseudo, but converting between
2927 	     floating-point and integer modes goes through the stack
2928 	     on various machines, so it's better to use the reserved
2929 	     stack slot than to risk wasting it and allocating more
2930 	     for the conversion.  */
2931 	  if (stack_parm == NULL_RTX)
2932 	    {
2933 	      int save = generating_concat_p;
2934 	      generating_concat_p = 0;
2935 	      stack_parm = gen_reg_rtx (mode);
2936 	      generating_concat_p = save;
2937 	    }
2938 	}
2939       data->stack_parm = NULL;
2940     }
2941 
2942   size = int_size_in_bytes (data->passed_type);
2943   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2944   if (stack_parm == 0)
2945     {
2946       SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
2947       stack_parm = assign_stack_local (BLKmode, size_stored,
2948 				       DECL_ALIGN (parm));
2949       if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2950 	PUT_MODE (stack_parm, GET_MODE (entry_parm));
2951       set_mem_attributes (stack_parm, parm, 1);
2952     }
2953 
2954   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
2955      calls that pass values in multiple non-contiguous locations.  */
2956   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2957     {
2958       rtx mem;
2959 
2960       /* Note that we will be storing an integral number of words.
2961 	 So we have to be careful to ensure that we allocate an
2962 	 integral number of words.  We do this above when we call
2963 	 assign_stack_local if space was not allocated in the argument
2964 	 list.  If it was, this will not work if PARM_BOUNDARY is not
2965 	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
2966 	 if it becomes a problem.  Exception is when BLKmode arrives
2967 	 with arguments not conforming to word_mode.  */
2968 
2969       if (data->stack_parm == 0)
2970 	;
2971       else if (GET_CODE (entry_parm) == PARALLEL)
2972 	;
2973       else
2974 	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2975 
2976       mem = validize_mem (copy_rtx (stack_parm));
2977 
2978       /* Handle values in multiple non-contiguous locations.  */
2979       if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2980 	emit_group_store (mem, entry_parm, data->passed_type, size);
2981       else if (GET_CODE (entry_parm) == PARALLEL)
2982 	{
2983 	  push_to_sequence2 (all->first_conversion_insn,
2984 			     all->last_conversion_insn);
2985 	  emit_group_store (mem, entry_parm, data->passed_type, size);
2986 	  all->first_conversion_insn = get_insns ();
2987 	  all->last_conversion_insn = get_last_insn ();
2988 	  end_sequence ();
2989 	  in_conversion_seq = true;
2990 	}
2991 
2992       else if (size == 0)
2993 	;
2994 
2995       /* If SIZE is that of a mode no bigger than a word, just use
2996 	 that mode's store operation.  */
2997       else if (size <= UNITS_PER_WORD)
2998 	{
2999 	  unsigned int bits = size * BITS_PER_UNIT;
3000 	  machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
3001 
3002 	  if (mode != BLKmode
3003 #ifdef BLOCK_REG_PADDING
3004 	      && (size == UNITS_PER_WORD
3005 		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3006 		      != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
3007 #endif
3008 	      )
3009 	    {
3010 	      rtx reg;
3011 
3012 	      /* We are really truncating a word_mode value containing
3013 		 SIZE bytes into a value of mode MODE.  If such an
3014 		 operation requires no actual instructions, we can refer
3015 		 to the value directly in mode MODE, otherwise we must
3016 		 start with the register in word_mode and explicitly
3017 		 convert it.  */
3018 	      if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
3019 						 BITS_PER_WORD))
3020 		reg = gen_rtx_REG (mode, REGNO (entry_parm));
3021 	      else
3022 		{
3023 		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3024 		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3025 		}
3026 	      emit_move_insn (change_address (mem, mode, 0), reg);
3027 	    }
3028 
3029 #ifdef BLOCK_REG_PADDING
3030 	  /* Storing the register in memory as a full word, as
3031 	     move_block_from_reg below would do, and then using the
3032 	     MEM in a smaller mode, has the effect of shifting right
3033 	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
3034 	     shifting must be explicit.  */
3035 	  else if (!MEM_P (mem))
3036 	    {
3037 	      rtx x;
3038 
3039 	      /* If the assert below fails, we should have taken the
3040 		 mode != BLKmode path above, unless we have downward
3041 		 padding of smaller-than-word arguments on a machine
3042 		 with little-endian bytes, which would likely require
3043 		 additional changes to work correctly.  */
3044 	      gcc_checking_assert (BYTES_BIG_ENDIAN
3045 				   && (BLOCK_REG_PADDING (mode,
3046 							  data->passed_type, 1)
3047 				       == PAD_UPWARD));
3048 
3049 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3050 
3051 	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3052 	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3053 				NULL_RTX, 1);
3054 	      x = force_reg (word_mode, x);
3055 	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3056 
3057 	      emit_move_insn (mem, x);
3058 	    }
3059 #endif
3060 
3061 	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3062 	     machine must be aligned to the left before storing
3063 	     to memory.  Note that the previous test doesn't
3064 	     handle all cases (e.g. SIZE == 3).  */
3065 	  else if (size != UNITS_PER_WORD
3066 #ifdef BLOCK_REG_PADDING
3067 		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3068 		       == PAD_DOWNWARD)
3069 #else
3070 		   && BYTES_BIG_ENDIAN
3071 #endif
3072 		   )
3073 	    {
3074 	      rtx tem, x;
3075 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3076 	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3077 
3078 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3079 	      tem = change_address (mem, word_mode, 0);
3080 	      emit_move_insn (tem, x);
3081 	    }
3082 	  else
3083 	    move_block_from_reg (REGNO (entry_parm), mem,
3084 				 size_stored / UNITS_PER_WORD);
3085 	}
3086       else if (!MEM_P (mem))
3087 	{
3088 	  gcc_checking_assert (size > UNITS_PER_WORD);
3089 #ifdef BLOCK_REG_PADDING
3090 	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3091 						  data->passed_type, 0)
3092 			       == PAD_UPWARD);
3093 #endif
3094 	  emit_move_insn (mem, entry_parm);
3095 	}
3096       else
3097 	move_block_from_reg (REGNO (entry_parm), mem,
3098 			     size_stored / UNITS_PER_WORD);
3099     }
3100   else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->passed_type))
3101     {
3102       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3103       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3104 		       BLOCK_OP_NORMAL);
3105       all->first_conversion_insn = get_insns ();
3106       all->last_conversion_insn = get_last_insn ();
3107       end_sequence ();
3108       in_conversion_seq = true;
3109     }
3110 
3111   if (target_reg)
3112     {
3113       if (!in_conversion_seq)
3114 	emit_move_insn (target_reg, stack_parm);
3115       else
3116 	{
3117 	  push_to_sequence2 (all->first_conversion_insn,
3118 			     all->last_conversion_insn);
3119 	  emit_move_insn (target_reg, stack_parm);
3120 	  all->first_conversion_insn = get_insns ();
3121 	  all->last_conversion_insn = get_last_insn ();
3122 	  end_sequence ();
3123 	}
3124       stack_parm = target_reg;
3125     }
3126 
3127   data->stack_parm = stack_parm;
3128   set_parm_rtl (parm, stack_parm);
3129 }
3130 
3131 /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
3132    parameter.  Get it there.  Perform all ABI specified conversions.  */
3133 
3134 static void
3135 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3136 		       struct assign_parm_data_one *data)
3137 {
3138   rtx parmreg, validated_mem;
3139   rtx equiv_stack_parm;
3140   machine_mode promoted_nominal_mode;
3141   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3142   bool did_conversion = false;
3143   bool need_conversion, moved;
3144   rtx rtl;
3145 
3146   /* Store the parm in a pseudoregister during the function, but we may
3147      need to do it in a wider mode.  Using 2 here makes the result
3148      consistent with promote_decl_mode and thus expand_expr_real_1.  */
3149   promoted_nominal_mode
3150     = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3151 			     TREE_TYPE (current_function_decl), 2);
3152 
3153   parmreg = gen_reg_rtx (promoted_nominal_mode);
3154   if (!DECL_ARTIFICIAL (parm))
3155     mark_user_reg (parmreg);
3156 
3157   /* If this was an item that we received a pointer to,
3158      set rtl appropriately.  */
3159   if (data->passed_pointer)
3160     {
3161       rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3162       set_mem_attributes (rtl, parm, 1);
3163     }
3164   else
3165     rtl = parmreg;
3166 
3167   assign_parm_remove_parallels (data);
3168 
3169   /* Copy the value into the register, thus bridging between
3170      assign_parm_find_data_types and expand_expr_real_1.  */
3171 
3172   equiv_stack_parm = data->stack_parm;
3173   validated_mem = validize_mem (copy_rtx (data->entry_parm));
3174 
3175   need_conversion = (data->nominal_mode != data->passed_mode
3176 		     || promoted_nominal_mode != data->promoted_mode);
3177   moved = false;
3178 
3179   if (need_conversion
3180       && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3181       && data->nominal_mode == data->passed_mode
3182       && data->nominal_mode == GET_MODE (data->entry_parm))
3183     {
3184       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3185 	 mode, by the caller.  We now have to convert it to
3186 	 NOMINAL_MODE, if different.  However, PARMREG may be in
3187 	 a different mode than NOMINAL_MODE if it is being stored
3188 	 promoted.
3189 
3190 	 If ENTRY_PARM is a hard register, it might be in a register
3191 	 not valid for operating in its mode (e.g., an odd-numbered
3192 	 register for a DFmode).  In that case, moves are the only
3193 	 thing valid, so we can't do a convert from there.  This
3194 	 occurs when the calling sequence allows such misaligned
3195 	 usages.
3196 
3197 	 In addition, the conversion may involve a call, which could
3198 	 clobber parameters which haven't been copied to pseudo
3199 	 registers yet.
3200 
3201 	 First, we try to emit an insn which performs the necessary
3202 	 conversion.  We verify that this insn does not clobber any
3203 	 hard registers.  */
3204 
3205       enum insn_code icode;
3206       rtx op0, op1;
3207 
3208       icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3209 			    unsignedp);
3210 
3211       op0 = parmreg;
3212       op1 = validated_mem;
3213       if (icode != CODE_FOR_nothing
3214 	  && insn_operand_matches (icode, 0, op0)
3215 	  && insn_operand_matches (icode, 1, op1))
3216 	{
3217 	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3218 	  rtx_insn *insn, *insns;
3219 	  rtx t = op1;
3220 	  HARD_REG_SET hardregs;
3221 
3222 	  start_sequence ();
3223 	  /* If op1 is a hard register that is likely spilled, first
3224 	     force it into a pseudo, otherwise the combiner might extend
3225 	     its lifetime too much.  */
3226 	  if (GET_CODE (t) == SUBREG)
3227 	    t = SUBREG_REG (t);
3228 	  if (REG_P (t)
3229 	      && HARD_REGISTER_P (t)
3230 	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3231 	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3232 	    {
3233 	      t = gen_reg_rtx (GET_MODE (op1));
3234 	      emit_move_insn (t, op1);
3235 	    }
3236 	  else
3237 	    t = op1;
3238 	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3239 					   data->passed_mode, unsignedp);
3240 	  emit_insn (pat);
3241 	  insns = get_insns ();
3242 
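	  /* Scan the insns just generated; if any of them sets a hard
	     register, the sequence is not safe to emit here and we fall
	     back to the generic conversion path below.  */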
3243 	  moved = true;
3244 	  CLEAR_HARD_REG_SET (hardregs);
3245 	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3246 	    {
3247 	      if (INSN_P (insn))
3248 		note_stores (PATTERN (insn), record_hard_reg_sets,
3249 			     &hardregs);
3250 	      if (!hard_reg_set_empty_p (hardregs))
3251 		moved = false;
3252 	    }
3253 
3254 	  end_sequence ();
3255 
3256 	  if (moved)
3257 	    {
3258 	      emit_insn (insns);
3259 	      if (equiv_stack_parm != NULL_RTX)
3260 		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3261 						  equiv_stack_parm);
3262 	    }
3263 	}
3264     }
3265 
3266   if (moved)
3267     /* Nothing to do.  */
3268     ;
3269   else if (need_conversion)
3270     {
3271       /* We did not have an insn to convert directly, or the sequence
3272 	 generated appeared unsafe.  We must first copy the parm to a
3273 	 pseudo reg, and save the conversion until after all
3274 	 parameters have been moved.  */
3275 
3276       int save_tree_used;
3277       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3278 
3279       emit_move_insn (tempreg, validated_mem);
3280 
3281       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3282       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3283 
3284       if (partial_subreg_p (tempreg)
3285 	  && GET_MODE (tempreg) == data->nominal_mode
3286 	  && REG_P (SUBREG_REG (tempreg))
3287 	  && data->nominal_mode == data->passed_mode
3288 	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3289 	{
3290 	  /* The argument is already sign/zero extended, so note it
3291 	     into the subreg.  */
3292 	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3293 	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
3294 	}
3295 
3296       /* TREE_USED gets set erroneously during expand_assignment.  */
3297       save_tree_used = TREE_USED (parm);
3298       SET_DECL_RTL (parm, rtl);
3299       expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3300       SET_DECL_RTL (parm, NULL_RTX);
3301       TREE_USED (parm) = save_tree_used;
3302       all->first_conversion_insn = get_insns ();
3303       all->last_conversion_insn = get_last_insn ();
3304       end_sequence ();
3305 
3306       did_conversion = true;
3307     }
3308   else
3309     emit_move_insn (parmreg, validated_mem);
3310 
3311   /* If we were passed a pointer but the actual value can safely live
3312      in a register, retrieve it and use it directly.  */
3313   if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3314     {
3315       /* We can't use nominal_mode, because it will have been set to
3316 	 Pmode above.  We must use the actual mode of the parm.  */
3317       if (use_register_for_decl (parm))
3318 	{
3319 	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3320 	  mark_user_reg (parmreg);
3321 	}
3322       else
3323 	{
3324 	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3325 					    TYPE_MODE (TREE_TYPE (parm)),
3326 					    TYPE_ALIGN (TREE_TYPE (parm)));
3327 	  parmreg
3328 	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3329 				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3330 				  align);
3331 	  set_mem_attributes (parmreg, parm, 1);
3332 	}
3333 
3334       /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3335 	 the debug info in case it is not legitimate.  */
3336       if (GET_MODE (parmreg) != GET_MODE (rtl))
3337 	{
3338 	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3339 	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3340 
3341 	  push_to_sequence2 (all->first_conversion_insn,
3342 			     all->last_conversion_insn);
3343 	  emit_move_insn (tempreg, rtl);
3344 	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3345 	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3346 			  tempreg);
3347 	  all->first_conversion_insn = get_insns ();
3348 	  all->last_conversion_insn = get_last_insn ();
3349 	  end_sequence ();
3350 
3351 	  did_conversion = true;
3352 	}
3353       else
3354 	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3355 
3356       rtl = parmreg;
3357 
3358       /* STACK_PARM is the pointer, not the parm, and PARMREG is
3359 	 now the parm.  */
3360       data->stack_parm = NULL;
3361     }
3362 
3363   set_parm_rtl (parm, rtl);
3364 
3365   /* Mark the register as eliminable if we did no conversion and it was
3366      copied from memory at a fixed offset, and the arg pointer was not
3367      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
3368      offset formed an invalid address, such memory-equivalences as we
3369      make here would screw up life analysis for it.  */
3370   if (data->nominal_mode == data->passed_mode
3371       && !did_conversion
3372       && data->stack_parm != 0
3373       && MEM_P (data->stack_parm)
3374       && data->locate.offset.var == 0
3375       && reg_mentioned_p (virtual_incoming_args_rtx,
3376 			  XEXP (data->stack_parm, 0)))
3377     {
3378       rtx_insn *linsn = get_last_insn ();
3379       rtx_insn *sinsn;
3380       rtx set;
3381 
3382       /* Mark complex types separately.  */
3383       if (GET_CODE (parmreg) == CONCAT)
3384 	{
3385 	  scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3386 	  int regnor = REGNO (XEXP (parmreg, 0));
3387 	  int regnoi = REGNO (XEXP (parmreg, 1));
3388 	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3389 	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
3390 					  GET_MODE_SIZE (submode));
3391 
3392 	  /* Scan backwards for the set of the real and
3393 	     imaginary parts.  */
3394 	  for (sinsn = linsn; sinsn != 0;
3395 	       sinsn = prev_nonnote_insn (sinsn))
3396 	    {
3397 	      set = single_set (sinsn);
3398 	      if (set == 0)
3399 		continue;
3400 
3401 	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
3402 		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3403 	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
3404 		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3405 	    }
3406 	}
3407       else
3408 	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3409     }
3410 
3411   /* For pointer data type, suggest pointer register.  */
3412   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3413     mark_reg_pointer (parmreg,
3414 		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3415 }
3416 
3417 /* A subroutine of assign_parms.  Allocate stack space to hold the current
3418    parameter.  Get it there.  Perform all ABI specified conversions.  */
3419 
3420 static void
3421 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3422 		         struct assign_parm_data_one *data)
3423 {
3424   /* Value must be stored in the stack slot STACK_PARM during function
3425      execution.  */
3426   bool to_conversion = false;
3427 
3428   assign_parm_remove_parallels (data);
3429 
3430   if (data->promoted_mode != data->nominal_mode)
3431     {
3432       /* Conversion is required.  */
3433       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3434 
3435       emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3436 
3437       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3438       to_conversion = true;
3439 
3440       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3441 					  TYPE_UNSIGNED (TREE_TYPE (parm)));
3442 
3443       if (data->stack_parm)
3444 	{
3445 	  poly_int64 offset
3446 	    = subreg_lowpart_offset (data->nominal_mode,
3447 				     GET_MODE (data->stack_parm));
3448 	  /* ??? This may need a big-endian conversion on sparc64.  */
3449 	  data->stack_parm
3450 	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
3451 	  if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3452 	    set_mem_offset (data->stack_parm,
3453 			    MEM_OFFSET (data->stack_parm) + offset);
3454 	}
3455     }
3456 
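  /* If the incoming value is not already sitting in its stack slot,
     make sure a slot exists and copy the value into it.  */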
3457   if (data->entry_parm != data->stack_parm)
3458     {
3459       rtx src, dest;
3460 
3461       if (data->stack_parm == 0)
3462 	{
3463 	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3464 					    GET_MODE (data->entry_parm),
3465 					    TYPE_ALIGN (data->passed_type));
3466 	  data->stack_parm
3467 	    = assign_stack_local (GET_MODE (data->entry_parm),
3468 				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3469 				  align);
3470 	  set_mem_attributes (data->stack_parm, parm, 1);
3471 	}
3472 
3473       dest = validize_mem (copy_rtx (data->stack_parm));
3474       src = validize_mem (copy_rtx (data->entry_parm));
3475 
3476       if (TYPE_EMPTY_P (data->passed_type))
3477 	/* Empty types don't really need to be copied.  */;
3478       else if (MEM_P (src))
3479 	{
3480 	  /* Use a block move to handle potentially misaligned entry_parm.  */
3481 	  if (!to_conversion)
3482 	    push_to_sequence2 (all->first_conversion_insn,
3483 			       all->last_conversion_insn);
3484 	  to_conversion = true;
3485 
3486 	  emit_block_move (dest, src,
3487 			   GEN_INT (int_size_in_bytes (data->passed_type)),
3488 			   BLOCK_OP_NORMAL);
3489 	}
3490       else
3491 	{
3492 	  if (!REG_P (src))
3493 	    src = force_reg (GET_MODE (src), src);
3494 	  emit_move_insn (dest, src);
3495 	}
3496     }
3497 
3498   if (to_conversion)
3499     {
3500       all->first_conversion_insn = get_insns ();
3501       all->last_conversion_insn = get_last_insn ();
3502       end_sequence ();
3503     }
3504 
3505   set_parm_rtl (parm, data->stack_parm);
3506 }
3507 
3508 /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
3509    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
3510 
3511 static void
3512 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3513 			      vec<tree> fnargs)
3514 {
3515   tree parm;
3516   tree orig_fnargs = all->orig_fnargs;
3517   unsigned i = 0;
3518 
3519   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3520     {
3521       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3522 	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3523 	{
3524 	  rtx tmp, real, imag;
3525 	  scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3526 
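	  /* The two halves were expanded as separate parameters; fetch
	     their DECL_RTLs and, if the modes differ (e.g. because of
	     promotion), narrow them to the complex type's inner mode.  */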
3527 	  real = DECL_RTL (fnargs[i]);
3528 	  imag = DECL_RTL (fnargs[i + 1]);
3529 	  if (inner != GET_MODE (real))
3530 	    {
3531 	      real = gen_lowpart_SUBREG (inner, real);
3532 	      imag = gen_lowpart_SUBREG (inner, imag);
3533 	    }
3534 
3535 	  if (TREE_ADDRESSABLE (parm))
3536 	    {
3537 	      rtx rmem, imem;
3538 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3539 	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3540 						DECL_MODE (parm),
3541 						TYPE_ALIGN (TREE_TYPE (parm)));
3542 
3543 	      /* split_complex_arg put the real and imag parts in
3544 		 pseudos.  Move them to memory.  */
3545 	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
3546 	      set_mem_attributes (tmp, parm, 1);
3547 	      rmem = adjust_address_nv (tmp, inner, 0);
3548 	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3549 	      push_to_sequence2 (all->first_conversion_insn,
3550 				 all->last_conversion_insn);
3551 	      emit_move_insn (rmem, real);
3552 	      emit_move_insn (imem, imag);
3553 	      all->first_conversion_insn = get_insns ();
3554 	      all->last_conversion_insn = get_last_insn ();
3555 	      end_sequence ();
3556 	    }
3557 	  else
3558 	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3559 	  set_parm_rtl (parm, tmp);
3560 
3561 	  real = DECL_INCOMING_RTL (fnargs[i]);
3562 	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3563 	  if (inner != GET_MODE (real))
3564 	    {
3565 	      real = gen_lowpart_SUBREG (inner, real);
3566 	      imag = gen_lowpart_SUBREG (inner, imag);
3567 	    }
3568 	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3569 	  set_decl_incoming_rtl (parm, tmp, false);
3570 	  i++;
3571 	}
3572     }
3573 }
3574 
3575 /* Load bounds of PARM from bounds table.  */
3576 static void
3577 assign_parm_load_bounds (struct assign_parm_data_one *data,
3578 			 tree parm,
3579 			 rtx entry,
3580 			 unsigned bound_no)
3581 {
3582   bitmap_iterator bi;
3583   unsigned i, offs = 0;
3584   int bnd_no = -1;
3585   rtx slot = NULL, ptr = NULL;
3586 
3587   if (parm)
3588     {
3589       bitmap slots;
3590       bitmap_obstack_initialize (NULL);
3591       slots = BITMAP_ALLOC (NULL);
3592       chkp_find_bound_slots (TREE_TYPE (parm), slots);
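      /* Walk the bound slots of PARM's type and translate BOUND_NO into
	 the index of the corresponding slot.  */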
3593       EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3594 	{
3595 	  if (bound_no)
3596 	    bound_no--;
3597 	  else
3598 	    {
3599 	      bnd_no = i;
3600 	      break;
3601 	    }
3602 	}
3603       BITMAP_FREE (slots);
3604       bitmap_obstack_release (NULL);
3605     }
3606 
3607   /* We may have bounds not associated with any pointer.  */
3608   if (bnd_no != -1)
3609     offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3610 
3611   /* Find associated pointer.  */
3612   if (bnd_no == -1)
3613     {
3614 	      /* If bounds are not associated with any pointer,
3615 		 then they are passed in a register or special slot.  */
3616       gcc_assert (data->entry_parm);
3617       ptr = const0_rtx;
3618     }
3619   else if (MEM_P (entry))
3620     slot = adjust_address (entry, Pmode, offs);
3621   else if (REG_P (entry))
3622     ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3623   else if (GET_CODE (entry) == PARALLEL)
3624     ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3625   else
3626     gcc_unreachable ();
3627   data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3628 							data->entry_parm);
3629 }
3630 
3631 /* Assign RTL expressions to the function's bounds parameters BNDARGS.  */
3632 
3633 static void
3634 assign_bounds (vec<bounds_parm_data> &bndargs,
3635 	       struct assign_parm_data_all &all,
3636 	       bool assign_regs, bool assign_special,
3637 	       bool assign_bt)
3638 {
3639   unsigned i, pass;
3640   bounds_parm_data *pbdata;
3641 
3642   if (!bndargs.exists ())
3643     return;
3644 
3645   /* We make several passes to store input bounds.  First we handle
3646      bounds passed in registers.  After that we load bounds passed in
3647      special slots.  Finally we load bounds from the Bounds Table.  */
3648   for (pass = 0; pass < 3; pass++)
3649     FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3650       {
3651 	/* Pass 0 => regs only.  */
3652 	if (pass == 0
3653 	    && (!assign_regs
3654 		|| (!pbdata->parm_data.entry_parm
3655 		   || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
3656 	  continue;
3657 	/* Pass 1 => slots only.  */
3658 	else if (pass == 1
3659 		 && (!assign_special
3660 		     || (!pbdata->parm_data.entry_parm
3661 			 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
3662 	  continue;
3663 	/* Pass 2 => BT only.  */
3664 	else if (pass == 2
3665 		 && (!assign_bt
3666 		     || pbdata->parm_data.entry_parm))
3667 	  continue;
3668 
3669 	if (!pbdata->parm_data.entry_parm
3670 	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3671 	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3672 				   pbdata->ptr_entry, pbdata->bound_no);
3673 
3674 	set_decl_incoming_rtl (pbdata->bounds_parm,
3675 			       pbdata->parm_data.entry_parm, false);
3676 
3677 	if (assign_parm_setup_block_p (&pbdata->parm_data))
3678 	  assign_parm_setup_block (&all, pbdata->bounds_parm,
3679 				   &pbdata->parm_data);
3680 	else if (pbdata->parm_data.passed_pointer
3681 		 || use_register_for_decl (pbdata->bounds_parm))
3682 	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
3683 				 &pbdata->parm_data);
3684 	else
3685 	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
3686 				   &pbdata->parm_data);
3687       }
3688 }
3689 
3690 /* Assign RTL expressions to the function's parameters.  This may involve
3691    copying them into registers and using those registers as the DECL_RTL.  */
3692 
3693 static void
3694 assign_parms (tree fndecl)
3695 {
3696   struct assign_parm_data_all all;
3697   tree parm;
3698   vec<tree> fnargs;
3699   unsigned i, bound_no = 0;
3700   tree last_arg = NULL;
3701   rtx last_arg_entry = NULL;
3702   vec<bounds_parm_data> bndargs = vNULL;
3703   bounds_parm_data bdata;
3704 
3705   crtl->args.internal_arg_pointer
3706     = targetm.calls.internal_arg_pointer ();
3707 
3708   assign_parms_initialize_all (&all);
3709   fnargs = assign_parms_augmented_arg_list (&all);
3710 
3711   FOR_EACH_VEC_ELT (fnargs, i, parm)
3712     {
3713       struct assign_parm_data_one data;
3714 
3715       /* Extract the type of PARM; adjust it according to ABI.  */
3716       assign_parm_find_data_types (&all, parm, &data);
3717 
3718       /* Early out for errors and void parameters.  */
3719       if (data.passed_mode == VOIDmode)
3720 	{
3721 	  SET_DECL_RTL (parm, const0_rtx);
3722 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3723 	  continue;
3724 	}
3725 
3726       /* Estimate stack alignment from parameter alignment.  */
3727       if (SUPPORTS_STACK_ALIGNMENT)
3728         {
3729           unsigned int align
3730 	    = targetm.calls.function_arg_boundary (data.promoted_mode,
3731 						   data.passed_type);
3732 	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3733 				     align);
3734 	  if (TYPE_ALIGN (data.nominal_type) > align)
3735 	    align = MINIMUM_ALIGNMENT (data.nominal_type,
3736 				       TYPE_MODE (data.nominal_type),
3737 				       TYPE_ALIGN (data.nominal_type));
3738 	  if (crtl->stack_alignment_estimated < align)
3739 	    {
3740 	      gcc_assert (!crtl->stack_realign_processed);
3741 	      crtl->stack_alignment_estimated = align;
3742 	    }
3743 	}
3744 
3745       /* Find out where the parameter arrives in this function.  */
3746       assign_parm_find_entry_rtl (&all, &data);
3747 
3748       /* Find out where stack space for this parameter might be.  */
3749       if (assign_parm_is_stack_parm (&all, &data))
3750 	{
3751 	  assign_parm_find_stack_rtl (parm, &data);
3752 	  assign_parm_adjust_entry_rtl (&data);
3753 	  /* For arguments that occupy no space in the parameter
3754 	     passing area, have non-zero size and have their address
3755 	     taken, force creation of a stack slot so that they have a
3756 	     distinct address from other parameters.  */
3757 	  if (TYPE_EMPTY_P (data.passed_type)
3758 	      && TREE_ADDRESSABLE (parm)
3759 	      && data.entry_parm == data.stack_parm
3760 	      && MEM_P (data.entry_parm)
3761 	      && int_size_in_bytes (data.passed_type))
3762 	    data.stack_parm = NULL_RTX;
3763 	}
3764       if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3765 	{
3766 	  /* Remember where the last non-bounds arg was passed in case
3767 	     we have to load associated bounds for it from the Bounds
3768 	     Table.  */
3769 	  last_arg = parm;
3770 	  last_arg_entry = data.entry_parm;
3771 	  bound_no = 0;
3772 	}
3773       /* Record permanently how this parm was passed.  */
3774       if (data.passed_pointer)
3775 	{
3776 	  rtx incoming_rtl
3777 	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3778 			   data.entry_parm);
3779 	  set_decl_incoming_rtl (parm, incoming_rtl, true);
3780 	}
3781       else
3782 	set_decl_incoming_rtl (parm, data.entry_parm, false);
3783 
3784       assign_parm_adjust_stack_rtl (&data);
3785 
3786       /* Bounds should be loaded in a particular order to
3787 	 have registers allocated correctly.  Collect info about
3788 	 input bounds and load them later.  */
3789       if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3790 	{
3791 	  /* Expect bounds in instrumented functions only.  */
3792 	  gcc_assert (chkp_function_instrumented_p (fndecl));
3793 
3794 	  bdata.parm_data = data;
3795 	  bdata.bounds_parm = parm;
3796 	  bdata.ptr_parm = last_arg;
3797 	  bdata.ptr_entry = last_arg_entry;
3798 	  bdata.bound_no = bound_no;
3799 	  bndargs.safe_push (bdata);
3800 	}
3801       else
3802 	{
3803 	  if (assign_parm_setup_block_p (&data))
3804 	    assign_parm_setup_block (&all, parm, &data);
3805 	  else if (data.passed_pointer || use_register_for_decl (parm))
3806 	    assign_parm_setup_reg (&all, parm, &data);
3807 	  else
3808 	    assign_parm_setup_stack (&all, parm, &data);
3809 	}
3810 
3811       if (cfun->stdarg && !DECL_CHAIN (parm))
3812 	{
3813 	  int pretend_bytes = 0;
3814 
3815 	  assign_parms_setup_varargs (&all, &data, false);
3816 
3817 	  if (chkp_function_instrumented_p (fndecl))
3818 	    {
3819 	      /* We expect this is the last parm.  Otherwise it is wrong
3820 		 to assign bounds right now.  */
3821 	      gcc_assert (i == (fnargs.length () - 1));
3822 	      assign_bounds (bndargs, all, true, false, false);
3823 	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3824 							  data.promoted_mode,
3825 							  data.passed_type,
3826 							  &pretend_bytes,
3827 							  false);
3828 	      assign_bounds (bndargs, all, false, true, true);
3829 	      bndargs.release ();
3830 	    }
3831 	}
3832 
3833       /* Update info on where next arg arrives in registers.  */
3834       targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3835 					  data.passed_type, data.named_arg);
3836 
3837       if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3838 	bound_no++;
3839     }
3840 
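  /* Load any remaining bounds now that all parameters have been
     processed, allowing registers, special slots and the Bounds Table.  */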
3841   assign_bounds (bndargs, all, true, true, true);
3842   bndargs.release ();
3843 
3844   if (targetm.calls.split_complex_arg)
3845     assign_parms_unsplit_complex (&all, fnargs);
3846 
3847   fnargs.release ();
3848 
3849   /* Output all parameter conversion instructions (possibly including calls)
3850      now that all parameters have been copied out of hard registers.  */
3851   emit_insn (all.first_conversion_insn);
3852 
3853   /* Estimate reload stack alignment from scalar return mode.  */
3854   if (SUPPORTS_STACK_ALIGNMENT)
3855     {
3856       if (DECL_RESULT (fndecl))
3857 	{
3858 	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
3859 	  machine_mode mode = TYPE_MODE (type);
3860 
3861 	  if (mode != BLKmode
3862 	      && mode != VOIDmode
3863 	      && !AGGREGATE_TYPE_P (type))
3864 	    {
3865 	      unsigned int align = GET_MODE_ALIGNMENT (mode);
3866 	      if (crtl->stack_alignment_estimated < align)
3867 		{
3868 		  gcc_assert (!crtl->stack_realign_processed);
3869 		  crtl->stack_alignment_estimated = align;
3870 		}
3871 	    }
3872 	}
3873     }
3874 
3875   /* If we are receiving a struct value address as the first argument, set up
3876      the RTL for the function result. As this might require code to convert
3877      the transmitted address to Pmode, we do this here to ensure that possible
3878      preliminary conversions of the address have been emitted already.  */
3879   if (all.function_result_decl)
3880     {
3881       tree result = DECL_RESULT (current_function_decl);
3882       rtx addr = DECL_RTL (all.function_result_decl);
3883       rtx x;
3884 
3885       if (DECL_BY_REFERENCE (result))
3886 	{
3887 	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3888 	  x = addr;
3889 	}
3890       else
3891 	{
3892 	  SET_DECL_VALUE_EXPR (result,
3893 			       build1 (INDIRECT_REF, TREE_TYPE (result),
3894 				       all.function_result_decl));
3895 	  addr = convert_memory_address (Pmode, addr);
3896 	  x = gen_rtx_MEM (DECL_MODE (result), addr);
3897 	  set_mem_attributes (x, result, 1);
3898 	}
3899 
3900       DECL_HAS_VALUE_EXPR_P (result) = 1;
3901 
3902       set_parm_rtl (result, x);
3903     }
3904 
3905   /* We have aligned all the args, so add space for the pretend args.  */
3906   crtl->args.pretend_args_size = all.pretend_args_size;
3907   all.stack_args_size.constant += all.extra_pretend_bytes;
3908   crtl->args.size = all.stack_args_size.constant;
3909 
3910   /* Adjust function incoming argument size for alignment and
3911      minimum length.  */
3912 
3913   crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3914   crtl->args.size = aligned_upper_bound (crtl->args.size,
3915 					 PARM_BOUNDARY / BITS_PER_UNIT);
3916 
3917   if (ARGS_GROW_DOWNWARD)
3918     {
3919       crtl->args.arg_offset_rtx
3920 	= (all.stack_args_size.var == 0
3921 	   ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3922 	   : expand_expr (size_diffop (all.stack_args_size.var,
3923 				       size_int (-all.stack_args_size.constant)),
3924 			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
3925     }
3926   else
3927     crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3928 
3929   /* See how many bytes, if any, of its args a function should try to pop
3930      on return.  */
3931 
3932   crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3933 							 TREE_TYPE (fndecl),
3934 							 crtl->args.size);
3935 
3936   /* For a stdarg.h function, save info about
3937      regs and stack space used by the named args.  */
3938 
3939   crtl->args.info = all.args_so_far_v;
3940 
3941   /* Set the rtx used for the function return value.  Put this in its
3942      own variable so any optimizers that need this information don't have
3943      to include tree.h.  Do this here so it gets done when an inlined
3944      function gets output.  */
3945 
3946   crtl->return_rtx
3947     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3948        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3949 
3950   /* If scalar return value was computed in a pseudo-reg, or was a named
3951      return value that got dumped to the stack, copy that to the hard
3952      return register.  */
3953   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3954     {
3955       tree decl_result = DECL_RESULT (fndecl);
3956       rtx decl_rtl = DECL_RTL (decl_result);
3957 
3958       if (REG_P (decl_rtl)
3959 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3960 	  : DECL_REGISTER (decl_result))
3961 	{
3962 	  rtx real_decl_rtl;
3963 
3964 	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3965 							fndecl, true);
3966 	  if (chkp_function_instrumented_p (fndecl))
3967 	    crtl->return_bnd
3968 	      = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3969 							  fndecl, true);
3970 	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3971 	  /* The delay slot scheduler assumes that crtl->return_rtx
3972 	     holds the hard register containing the return value, not a
3973 	     temporary pseudo.  */
3974 	  crtl->return_rtx = real_decl_rtl;
3975 	}
3976     }
3977 }
3978 
3979 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3980    For all seen types, gimplify their sizes.  */
3981 
3982 static tree
3983 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3984 {
3985   tree t = *tp;
3986 
3987   *walk_subtrees = 0;
3988   if (TYPE_P (t))
3989     {
3990       if (POINTER_TYPE_P (t))
3991 	*walk_subtrees = 1;
3992       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3993 	       && !TYPE_SIZES_GIMPLIFIED (t))
3994 	{
3995 	  gimplify_type_sizes (t, (gimple_seq *) data);
3996 	  *walk_subtrees = 1;
3997 	}
3998     }
3999 
4000   return NULL;
4001 }
4002 
4003 /* Gimplify the parameter list for current_function_decl.  This involves
4004    evaluating SAVE_EXPRs of variable sized parameters and generating code
4005    to implement callee-copies reference parameters.  Returns a sequence of
4006    statements to add to the beginning of the function.  */
4007 
4008 gimple_seq
4009 gimplify_parameters (gimple_seq *cleanup)
4010 {
4011   struct assign_parm_data_all all;
4012   tree parm;
4013   gimple_seq stmts = NULL;
4014   vec<tree> fnargs;
4015   unsigned i;
4016 
4017   assign_parms_initialize_all (&all);
4018   fnargs = assign_parms_augmented_arg_list (&all);
4019 
4020   FOR_EACH_VEC_ELT (fnargs, i, parm)
4021     {
4022       struct assign_parm_data_one data;
4023 
4024       /* Extract the type of PARM; adjust it according to ABI.  */
4025       assign_parm_find_data_types (&all, parm, &data);
4026 
4027       /* Early out for errors and void parameters.  */
4028       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
4029 	continue;
4030 
4031       /* Update info on where next arg arrives in registers.  */
4032       targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
4033 					  data.passed_type, data.named_arg);
4034 
4035       /* ??? Once upon a time variable_size stuffed parameter list
4036 	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
4037 	 turned out to be less than manageable in the gimple world.
4038 	 Now we have to hunt them down ourselves.  */
4039       walk_tree_without_duplicates (&data.passed_type,
4040 				    gimplify_parm_type, &stmts);
4041 
4042       if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4043 	{
4044 	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
4045 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
4046 	}
4047 
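      /* For callee-copied reference parameters, create a local copy (an
	 alloca'd one for variable-sized types) and make PARM refer to it
	 through a DECL_VALUE_EXPR.  */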
4048       if (data.passed_pointer)
4049 	{
4050           tree type = TREE_TYPE (data.passed_type);
4051 	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4052 				       type, data.named_arg))
4053 	    {
4054 	      tree local, t;
4055 
4056 	      /* For constant-sized objects, this is trivial; for
4057 		 variable-sized objects, we have to play games.  */
4058 	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
4059 		  && !(flag_stack_check == GENERIC_STACK_CHECK
4060 		       && compare_tree_int (DECL_SIZE_UNIT (parm),
4061 					    STACK_CHECK_MAX_VAR_SIZE) > 0))
4062 		{
4063 		  local = create_tmp_var (type, get_name (parm));
4064 		  DECL_IGNORED_P (local) = 0;
4065 		  /* If PARM was addressable, move that flag over
4066 		     to the local copy, as its address will be taken,
4067 		     not the PARM's.  Keep the PARM marked addressable
4068 		     as we'll query that flag during gimplification.  */
4069 		  if (TREE_ADDRESSABLE (parm))
4070 		    TREE_ADDRESSABLE (local) = 1;
4071 		  else if (TREE_CODE (type) == COMPLEX_TYPE
4072 			   || TREE_CODE (type) == VECTOR_TYPE)
4073 		    DECL_GIMPLE_REG_P (local) = 1;
4074 
4075 		  if (!is_gimple_reg (local)
4076 		      && flag_stack_reuse != SR_NONE)
4077 		    {
4078 		      tree clobber = build_constructor (type, NULL);
4079 		      gimple *clobber_stmt;
4080 		      TREE_THIS_VOLATILE (clobber) = 1;
4081 		      clobber_stmt = gimple_build_assign (local, clobber);
4082 		      gimple_seq_add_stmt (cleanup, clobber_stmt);
4083 		    }
4084 		}
4085 	      else
4086 		{
4087 		  tree ptr_type, addr;
4088 
4089 		  ptr_type = build_pointer_type (type);
4090 		  addr = create_tmp_reg (ptr_type, get_name (parm));
4091 		  DECL_IGNORED_P (addr) = 0;
4092 		  local = build_fold_indirect_ref (addr);
4093 
4094 		  t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
4095 					      DECL_ALIGN (parm),
4096 					      max_int_size_in_bytes (type));
4097 		  /* The call has been built for a variable-sized object.  */
4098 		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
4099 		  t = fold_convert (ptr_type, t);
4100 		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4101 		  gimplify_and_add (t, &stmts);
4102 		}
4103 
4104 	      gimplify_assign (local, parm, &stmts);
4105 
4106 	      SET_DECL_VALUE_EXPR (parm, local);
4107 	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
4108 	    }
4109 	}
4110     }
4111 
4112   fnargs.release ();
4113 
4114   return stmts;
4115 }
4116 
4117 /* Compute the size and offset from the start of the stacked arguments for a
4118    parm passed in mode PASSED_MODE and with type TYPE.
4119 
4120    INITIAL_OFFSET_PTR points to the current offset into the stacked
4121    arguments.
4122 
4123    The starting offset and size for this parm are returned in
4124    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
4125    nonzero, the offset is that of stack slot, which is returned in
4126    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
4127    padding required from the initial offset ptr to the stack slot.
4128 
4129    IN_REGS is nonzero if the argument will be passed in registers.  It will
4130    never be set if REG_PARM_STACK_SPACE is not defined.
4131 
4132    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4133    for arguments which are passed in registers.
4134 
4135    FNDECL is the function in which the argument was defined.
4136 
4137    There are two types of rounding that are done.  The first, controlled by
4138    TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4139    argument list to be aligned to the specific boundary (in bits).  This
4140    rounding affects the initial and starting offsets, but not the argument
4141    size.
4142 
4143    The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4144    optionally rounds the size of the parm to PARM_BOUNDARY.  The
4145    initial offset is not affected by this rounding, while the size always
4146    is and the starting offset may be.  */
4147 
4148 /*  LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
4149     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4150     callers pass in the total size of args so far as
4151     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
4152 
4153 void
4154 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4155 		     int reg_parm_stack_space, int partial,
4156 		     tree fndecl ATTRIBUTE_UNUSED,
4157 		     struct args_size *initial_offset_ptr,
4158 		     struct locate_and_pad_arg_data *locate)
4159 {
4160   tree sizetree;
4161   pad_direction where_pad;
4162   unsigned int boundary, round_boundary;
4163   int part_size_in_regs;
4164 
4165   /* If we have found a stack parm before we reach the end of the
4166      area reserved for registers, skip that area.  */
4167   if (! in_regs)
4168     {
4169       if (reg_parm_stack_space > 0)
4170 	{
4171 	  if (initial_offset_ptr->var
4172 	      || !ordered_p (initial_offset_ptr->constant,
4173 			     reg_parm_stack_space))
4174 	    {
4175 	      initial_offset_ptr->var
4176 		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4177 			      ssize_int (reg_parm_stack_space));
4178 	      initial_offset_ptr->constant = 0;
4179 	    }
4180 	  else
4181 	    initial_offset_ptr->constant
4182 	      = ordered_max (initial_offset_ptr->constant,
4183 			     reg_parm_stack_space);
4184 	}
4185     }
4186 
4187   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4188 
4189   sizetree = (type
4190 	      ? arg_size_in_bytes (type)
4191 	      : size_int (GET_MODE_SIZE (passed_mode)));
4192   where_pad = targetm.calls.function_arg_padding (passed_mode, type);
4193   boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4194   round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4195 							      type);
4196   locate->where_pad = where_pad;
4197 
4198   /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
4199   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4200     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4201 
4202   locate->boundary = boundary;
4203 
4204   if (SUPPORTS_STACK_ALIGNMENT)
4205     {
4206       /* stack_alignment_estimated can't change after stack has been
4207 	 realigned.  */
4208       if (crtl->stack_alignment_estimated < boundary)
4209         {
4210           if (!crtl->stack_realign_processed)
4211 	    crtl->stack_alignment_estimated = boundary;
4212 	  else
4213 	    {
4214 	      /* If the stack is realigned and the stack alignment value
4215 		 hasn't been finalized, it is OK not to increase
4216 		 stack_alignment_estimated.  The bigger alignment
4217 		 requirement is recorded in stack_alignment_needed
4218 		 below.  */
4219 	      gcc_assert (!crtl->stack_realign_finalized
4220 			  && crtl->stack_realign_needed);
4221 	    }
4222 	}
4223     }
4224 
4225   /* Remember if the outgoing parameter requires extra alignment on the
4226      calling function side.  */
4227   if (crtl->stack_alignment_needed < boundary)
4228     crtl->stack_alignment_needed = boundary;
4229   if (crtl->preferred_stack_boundary < boundary)
4230     crtl->preferred_stack_boundary = boundary;
4231 
4232   if (ARGS_GROW_DOWNWARD)
4233     {
4234       locate->slot_offset.constant = -initial_offset_ptr->constant;
4235       if (initial_offset_ptr->var)
4236 	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4237 					      initial_offset_ptr->var);
4238 
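      /* Subtract the argument's size, rounded up to ROUND_BOUNDARY when
	 padding applies, from the slot offset.  */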
4239       {
4240 	tree s2 = sizetree;
4241 	if (where_pad != PAD_NONE
4242 	    && (!tree_fits_uhwi_p (sizetree)
4243 		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4244 	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4245 	SUB_PARM_SIZE (locate->slot_offset, s2);
4246       }
4247 
4248       locate->slot_offset.constant += part_size_in_regs;
4249 
4250       if (!in_regs || reg_parm_stack_space > 0)
4251 	pad_to_arg_alignment (&locate->slot_offset, boundary,
4252 			      &locate->alignment_pad);
4253 
4254       locate->size.constant = (-initial_offset_ptr->constant
4255 			       - locate->slot_offset.constant);
4256       if (initial_offset_ptr->var)
4257 	locate->size.var = size_binop (MINUS_EXPR,
4258 				       size_binop (MINUS_EXPR,
4259 						   ssize_int (0),
4260 						   initial_offset_ptr->var),
4261 				       locate->slot_offset.var);
4262 
4263       /* Pad_below needs the pre-rounded size to know how much to pad
4264 	 below.  */
4265       locate->offset = locate->slot_offset;
4266       if (where_pad == PAD_DOWNWARD)
4267 	pad_below (&locate->offset, passed_mode, sizetree);
4268 
4269     }
4270   else
4271     {
4272       if (!in_regs || reg_parm_stack_space > 0)
4273 	pad_to_arg_alignment (initial_offset_ptr, boundary,
4274 			      &locate->alignment_pad);
4275       locate->slot_offset = *initial_offset_ptr;
4276 
4277 #ifdef PUSH_ROUNDING
4278       if (passed_mode != BLKmode)
4279 	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4280 #endif
4281 
4282       /* Pad_below needs the pre-rounded size to know how much to pad below
4283 	 so this must be done before rounding up.  */
4284       locate->offset = locate->slot_offset;
4285       if (where_pad == PAD_DOWNWARD)
4286 	pad_below (&locate->offset, passed_mode, sizetree);
4287 
4288       if (where_pad != PAD_NONE
4289 	  && (!tree_fits_uhwi_p (sizetree)
4290 	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4291 	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4292 
4293       ADD_PARM_SIZE (locate->size, sizetree);
4294 
4295       locate->size.constant -= part_size_in_regs;
4296     }
4297 
4298   locate->offset.constant
4299     += targetm.calls.function_arg_offset (passed_mode, type);
4300 }
4301 
4302 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4303    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
4304 
4305 static void
4306 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4307 		      struct args_size *alignment_pad)
4308 {
4309   tree save_var = NULL_TREE;
4310   poly_int64 save_constant = 0;
4311   int boundary_in_bytes = boundary / BITS_PER_UNIT;
4312   poly_int64 sp_offset = STACK_POINTER_OFFSET;
4313 
4314 #ifdef SPARC_STACK_BOUNDARY_HACK
4315   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4316      the real alignment of %sp.  However, when it does this, the
4317      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
4318   if (SPARC_STACK_BOUNDARY_HACK)
4319     sp_offset = 0;
4320 #endif
4321 
4322   if (boundary > PARM_BOUNDARY)
4323     {
4324       save_var = offset_ptr->var;
4325       save_constant = offset_ptr->constant;
4326     }
4327 
4328   alignment_pad->var = NULL_TREE;
4329   alignment_pad->constant = 0;
4330 
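  /* Byte-aligned arguments need no padding.  If the offset is variable,
     or its misalignment relative to BOUNDARY cannot be determined at
     compile time, fall back to computing the rounded offset with trees.  */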
4331   if (boundary > BITS_PER_UNIT)
4332     {
4333       int misalign;
4334       if (offset_ptr->var
4335 	  || !known_misalignment (offset_ptr->constant + sp_offset,
4336 				  boundary_in_bytes, &misalign))
4337 	{
4338 	  tree sp_offset_tree = ssize_int (sp_offset);
4339 	  tree offset = size_binop (PLUS_EXPR,
4340 				    ARGS_SIZE_TREE (*offset_ptr),
4341 				    sp_offset_tree);
4342 	  tree rounded;
4343 	  if (ARGS_GROW_DOWNWARD)
4344 	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
4345 	  else
4346 	    rounded = round_up   (offset, boundary / BITS_PER_UNIT);
4347 
4348 	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4349 	  /* ARGS_SIZE_TREE includes constant term.  */
4350 	  offset_ptr->constant = 0;
4351 	  if (boundary > PARM_BOUNDARY)
4352 	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4353 					     save_var);
4354 	}
4355       else
4356 	{
4357 	  if (ARGS_GROW_DOWNWARD)
4358 	    offset_ptr->constant -= misalign;
4359 	  else
4360 	    offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4361 
4362 	  if (boundary > PARM_BOUNDARY)
4363 	    alignment_pad->constant = offset_ptr->constant - save_constant;
4364 	}
4365     }
4366 }
4367 
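/* Adjust *OFFSET_PTR for downward padding of an argument of mode
   PASSED_MODE and size SIZETREE: advance the offset by the difference
   between the argument's size and that size rounded up to a multiple of
   PARM_BOUNDARY.  */
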
4368 static void
4369 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4370 {
4371   unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4372   int misalign;
4373   if (passed_mode != BLKmode
4374       && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4375     offset_ptr->constant += -misalign & (align - 1);
4376   else
4377     {
4378       if (TREE_CODE (sizetree) != INTEGER_CST
4379 	  || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4380 	{
4381 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
4382 	  tree s2 = round_up (sizetree, align);
4383 	  /* Add it in.  */
4384 	  ADD_PARM_SIZE (*offset_ptr, s2);
4385 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
4386 	}
4387     }
4388 }
4389 
4390 
4391 /* True if register REGNO was alive at a place where `setjmp' was
4392    called and was set more than once or is an argument.  Such regs may
4393    be clobbered by `longjmp'.  */
4394 
4395 static bool
4396 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4397 {
4398   /* There appear to be cases where some local vars never reach the
4399      backend but have bogus regnos.  */
4400   if (regno >= max_reg_num ())
4401     return false;
4402 
4403   return ((REG_N_SETS (regno) > 1
4404 	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4405 			       regno))
4406 	  && REGNO_REG_SET_P (setjmp_crosses, regno));
4407 }
4408 
4409 /* Walk the tree of blocks describing the binding levels within a
4410    function and warn about variables that might be killed by setjmp
4411    or vfork.  This is done after flow analysis and before register
4412    allocation, since register allocation will turn the pseudo-regs
4413    into hard regs.  */
4414 
4415 static void
4416 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4417 {
4418   tree decl, sub;
4419 
4420   for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4421     {
4422       if (VAR_P (decl)
4423 	  && DECL_RTL_SET_P (decl)
4424 	  && REG_P (DECL_RTL (decl))
4425 	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4426 	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4427                  " %<longjmp%> or %<vfork%>", decl);
4428     }
4429 
4430   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4431     setjmp_vars_warning (setjmp_crosses, sub);
4432 }
4433 
4434 /* Do the appropriate part of setjmp_vars_warning
4435    but for arguments instead of local variables.  */
4436 
4437 static void
4438 setjmp_args_warning (bitmap setjmp_crosses)
4439 {
4440   tree decl;
4441   for (decl = DECL_ARGUMENTS (current_function_decl);
4442        decl; decl = DECL_CHAIN (decl))
4443     if (DECL_RTL (decl) != 0
4444 	&& REG_P (DECL_RTL (decl))
4445 	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4446       warning (OPT_Wclobbered,
4447                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4448 	       decl);
4449 }
4450 
4451 /* Generate warning messages for variables live across setjmp.  */
4452 
4453 void
4454 generate_setjmp_warnings (void)
4455 {
4456   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4457 
4458   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4459       || bitmap_empty_p (setjmp_crosses))
4460     return;
4461 
4462   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4463   setjmp_args_warning (setjmp_crosses);
4464 }
4465 
4466 
4467 /* Reverse the order of elements in the fragment chain T of blocks,
4468    and return the new head of the chain (old last element).
4469    In addition to that clear BLOCK_SAME_RANGE flags when needed
4470    and adjust BLOCK_SUPERCONTEXT from the super fragment to
4471    its super fragment origin.  */
4472 
4473 static tree
4474 block_fragments_nreverse (tree t)
4475 {
4476   tree prev = 0, block, next, prev_super = 0;
4477   tree super = BLOCK_SUPERCONTEXT (t);
4478   if (BLOCK_FRAGMENT_ORIGIN (super))
4479     super = BLOCK_FRAGMENT_ORIGIN (super);
4480   for (block = t; block; block = next)
4481     {
4482       next = BLOCK_FRAGMENT_CHAIN (block);
4483       BLOCK_FRAGMENT_CHAIN (block) = prev;
4484       if ((prev && !BLOCK_SAME_RANGE (prev))
4485 	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4486 	      != prev_super))
4487 	BLOCK_SAME_RANGE (block) = 0;
4488       prev_super = BLOCK_SUPERCONTEXT (block);
4489       BLOCK_SUPERCONTEXT (block) = super;
4490       prev = block;
4491     }
4492   t = BLOCK_FRAGMENT_ORIGIN (t);
4493   if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4494       != prev_super)
4495     BLOCK_SAME_RANGE (t) = 0;
4496   BLOCK_SUPERCONTEXT (t) = super;
4497   return prev;
4498 }
4499 
4500 /* Reverse the order of elements in the chain T of blocks,
4501    and return the new head of the chain (old last element).
4502    Also do the same on subblocks and reverse the order of elements
4503    in BLOCK_FRAGMENT_CHAIN as well.  */
4504 
4505 static tree
4506 blocks_nreverse_all (tree t)
4507 {
4508   tree prev = 0, block, next;
4509   for (block = t; block; block = next)
4510     {
4511       next = BLOCK_CHAIN (block);
4512       BLOCK_CHAIN (block) = prev;
4513       if (BLOCK_FRAGMENT_CHAIN (block)
4514 	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4515 	{
4516 	  BLOCK_FRAGMENT_CHAIN (block)
4517 	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4518 	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4519 	    BLOCK_SAME_RANGE (block) = 0;
4520 	}
4521       BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4522       prev = block;
4523     }
4524   return prev;
4525 }
4526 
4527 
4528 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4529    and create duplicate blocks.  */
4530 /* ??? Need an option to either create block fragments or to create
4531    abstract origin duplicates of a source block.  It really depends
4532    on what optimization has been performed.  */
4533 
4534 void
4535 reorder_blocks (void)
4536 {
4537   tree block = DECL_INITIAL (current_function_decl);
4538 
4539   if (block == NULL_TREE)
4540     return;
4541 
4542   auto_vec<tree, 10> block_stack;
4543 
4544   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
4545   clear_block_marks (block);
4546 
4547   /* Prune the old trees away, so that they don't get in the way.  */
4548   BLOCK_SUBBLOCKS (block) = NULL_TREE;
4549   BLOCK_CHAIN (block) = NULL_TREE;
4550 
4551   /* Recreate the block tree from the note nesting.  */
4552   reorder_blocks_1 (get_insns (), block, &block_stack);
4553   BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4554 }
4555 
4556 /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
4557 
4558 void
4559 clear_block_marks (tree block)
4560 {
4561   while (block)
4562     {
4563       TREE_ASM_WRITTEN (block) = 0;
4564       clear_block_marks (BLOCK_SUBBLOCKS (block));
4565       block = BLOCK_CHAIN (block);
4566     }
4567 }
4568 
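/* A subroutine of reorder_blocks.  Walk INSNS and rebuild the BLOCK tree
   underneath CURRENT_BLOCK from the NOTE_INSN_BLOCK_{BEG,END} notes,
   using P_BLOCK_STACK to track the blocks that are currently open.  */
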
4569 static void
4570 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4571 		  vec<tree> *p_block_stack)
4572 {
4573   rtx_insn *insn;
4574   tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4575 
4576   for (insn = insns; insn; insn = NEXT_INSN (insn))
4577     {
4578       if (NOTE_P (insn))
4579 	{
4580 	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4581 	    {
4582 	      tree block = NOTE_BLOCK (insn);
4583 	      tree origin;
4584 
4585 	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4586 	      origin = block;
4587 
4588 	      if (prev_end)
4589 		BLOCK_SAME_RANGE (prev_end) = 0;
4590 	      prev_end = NULL_TREE;
4591 
4592 	      /* If we have seen this block before, that means it now
4593 		 spans multiple address regions.  Create a new fragment.  */
4594 	      if (TREE_ASM_WRITTEN (block))
4595 		{
4596 		  tree new_block = copy_node (block);
4597 
4598 		  BLOCK_SAME_RANGE (new_block) = 0;
4599 		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4600 		  BLOCK_FRAGMENT_CHAIN (new_block)
4601 		    = BLOCK_FRAGMENT_CHAIN (origin);
4602 		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4603 
4604 		  NOTE_BLOCK (insn) = new_block;
4605 		  block = new_block;
4606 		}
4607 
4608 	      if (prev_beg == current_block && prev_beg)
4609 		BLOCK_SAME_RANGE (block) = 1;
4610 
4611 	      prev_beg = origin;
4612 
4613 	      BLOCK_SUBBLOCKS (block) = 0;
4614 	      TREE_ASM_WRITTEN (block) = 1;
4615 	      /* When there's only one block for the entire function,
4616 		 current_block == block and we mustn't do this, as it
4617 		 would cause infinite recursion.  */
4618 	      if (block != current_block)
4619 		{
4620 		  tree super;
4621 		  if (block != origin)
4622 		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4623 				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4624 								      (origin))
4625 				   == current_block);
4626 		  if (p_block_stack->is_empty ())
4627 		    super = current_block;
4628 		  else
4629 		    {
4630 		      super = p_block_stack->last ();
4631 		      gcc_assert (super == current_block
4632 				  || BLOCK_FRAGMENT_ORIGIN (super)
4633 				     == current_block);
4634 		    }
4635 		  BLOCK_SUPERCONTEXT (block) = super;
4636 		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4637 		  BLOCK_SUBBLOCKS (current_block) = block;
4638 		  current_block = origin;
4639 		}
4640 	      p_block_stack->safe_push (block);
4641 	    }
4642 	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4643 	    {
4644 	      NOTE_BLOCK (insn) = p_block_stack->pop ();
4645 	      current_block = BLOCK_SUPERCONTEXT (current_block);
4646 	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
4647 		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4648 	      prev_beg = NULL_TREE;
4649 	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4650 			 ? NOTE_BLOCK (insn) : NULL_TREE;
4651 	    }
4652 	}
4653       else
4654 	{
4655 	  prev_beg = NULL_TREE;
4656 	  if (prev_end)
4657 	    BLOCK_SAME_RANGE (prev_end) = 0;
4658 	  prev_end = NULL_TREE;
4659 	}
4660     }
4661 }
4662 
4663 /* Reverse the order of elements in the chain T of blocks,
4664    and return the new head of the chain (old last element).  */
4665 
4666 tree
4667 blocks_nreverse (tree t)
4668 {
4669   tree prev = 0, block, next;
4670   for (block = t; block; block = next)
4671     {
4672       next = BLOCK_CHAIN (block);
4673       BLOCK_CHAIN (block) = prev;
4674       prev = block;
4675     }
4676   return prev;
4677 }
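/* For example (hypothetical blocks b1, b2, b3 chained via BLOCK_CHAIN):

     b1 -> b2 -> b3                  before
     tree head = blocks_nreverse (b1);
     head == b3, b3 -> b2 -> b1      after

   The reversal is destructive; the old head becomes the tail.  */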
4678 
4679 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4680    by modifying the last node in chain 1 to point to chain 2.  */
4681 
4682 tree
4683 block_chainon (tree op1, tree op2)
4684 {
4685   tree t1;
4686 
4687   if (!op1)
4688     return op2;
4689   if (!op2)
4690     return op1;
4691 
4692   for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4693     continue;
4694   BLOCK_CHAIN (t1) = op2;
4695 
4696 #ifdef ENABLE_TREE_CHECKING
4697   {
4698     tree t2;
4699     for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4700       gcc_assert (t2 != t1);
4701   }
4702 #endif
4703 
4704   return op1;
4705 }
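/* For instance, block_chainon (b1, b3) applied to the chains b1 -> b2 and
   b3 yields b1 -> b2 -> b3 and returns b1; since we walk to the end of
   OP1, the cost is linear in the length of the first chain.  (b1..b3 are
   hypothetical BLOCK nodes.)  */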
4706 
4707 /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
4708    non-NULL, list them all into VECTOR, in a depth-first preorder
4709    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
4710    blocks.  */
4711 
4712 static int
4713 all_blocks (tree block, tree *vector)
4714 {
4715   int n_blocks = 0;
4716 
4717   while (block)
4718     {
4719       TREE_ASM_WRITTEN (block) = 0;
4720 
4721       /* Record this block.  */
4722       if (vector)
4723 	vector[n_blocks] = block;
4724 
4725       ++n_blocks;
4726 
4727       /* Record the subblocks, and their subblocks...  */
4728       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4729 			      vector ? vector + n_blocks : 0);
4730       block = BLOCK_CHAIN (block);
4731     }
4732 
4733   return n_blocks;
4734 }
4735 
4736 /* Return a vector containing all the blocks rooted at BLOCK.  The
4737    number of elements in the vector is stored in N_BLOCKS_P.  The
4738    vector is dynamically allocated; it is the caller's responsibility
4739    to call `free' on the pointer returned.  */
4740 
4741 static tree *
4742 get_block_vector (tree block, int *n_blocks_p)
4743 {
4744   tree *block_vector;
4745 
4746   *n_blocks_p = all_blocks (block, NULL);
4747   block_vector = XNEWVEC (tree, *n_blocks_p);
4748   all_blocks (block, block_vector);
4749 
4750   return block_vector;
4751 }
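/* Illustrative use, mirroring number_blocks below (fndecl stands for some
   FUNCTION_DECL):

     int n_blocks;
     tree *vec = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     for (int i = 0; i < n_blocks; i++)
       ... inspect vec[i] ...
     free (vec);
*/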
4752 
4753 static GTY(()) int next_block_index = 2;
4754 
4755 /* Set BLOCK_NUMBER for all the blocks in FN.  */
4756 
4757 void
4758 number_blocks (tree fn)
4759 {
4760   int i;
4761   int n_blocks;
4762   tree *block_vector;
4763 
4764   /* For XCOFF debugging output, we start numbering the blocks
4765      from 1 within each function, rather than keeping a running
4766      count.  */
4767 #if defined (XCOFF_DEBUGGING_INFO)
4768   if (write_symbols == XCOFF_DEBUG)
4769     next_block_index = 1;
4770 #endif
4771 
4772   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4773 
4774   /* The top-level BLOCK isn't numbered at all.  */
4775   for (i = 1; i < n_blocks; ++i)
4776     /* We number the blocks from two.  */
4777     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4778 
4779   free (block_vector);
4780 
4781   return;
4782 }
4783 
4784 /* If VAR is present in a subblock of BLOCK, return the subblock.  */
4785 
4786 DEBUG_FUNCTION tree
4787 debug_find_var_in_block_tree (tree var, tree block)
4788 {
4789   tree t;
4790 
4791   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4792     if (t == var)
4793       return block;
4794 
4795   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4796     {
4797       tree ret = debug_find_var_in_block_tree (var, t);
4798       if (ret)
4799 	return ret;
4800     }
4801 
4802   return NULL_TREE;
4803 }
4804 
4805 /* Keep track of whether we're in a dummy function context.  If we are,
4806    we don't want to invoke the set_current_function hook, because we'll
4807    get into trouble if the hook calls target_reinit () recursively or
4808    when the initial initialization is not yet complete.  */
4809 
4810 static bool in_dummy_function;
4811 
4812 /* Invoke the target hook when setting cfun.  Update the optimization options
4813    if the function uses different options than the default.  */
4814 
4815 static void
4816 invoke_set_current_function_hook (tree fndecl)
4817 {
4818   if (!in_dummy_function)
4819     {
4820       tree opts = ((fndecl)
4821 		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4822 		   : optimization_default_node);
4823 
4824       if (!opts)
4825 	opts = optimization_default_node;
4826 
4827       /* Change optimization options if needed.  */
4828       if (optimization_current_node != opts)
4829 	{
4830 	  optimization_current_node = opts;
4831 	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4832 	}
4833 
4834       targetm.set_current_function (fndecl);
4835       this_fn_optabs = this_target_optabs;
4836 
4837       if (opts != optimization_default_node)
4838 	{
4839 	  init_tree_optimization_optabs (opts);
4840 	  if (TREE_OPTIMIZATION_OPTABS (opts))
4841 	    this_fn_optabs = (struct target_optabs *)
4842 	      TREE_OPTIMIZATION_OPTABS (opts);
4843 	}
4844     }
4845 }
4846 
4847 /* cfun should never be set directly; use this function.  */
4848 
4849 void
4850 set_cfun (struct function *new_cfun, bool force)
4851 {
4852   if (cfun != new_cfun || force)
4853     {
4854       cfun = new_cfun;
4855       invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4856       redirect_edge_var_map_empty ();
4857     }
4858 }
4859 
4860 /* Initialized with NOGC, making this poisonous to the garbage collector.  */
4861 
4862 static vec<function *> cfun_stack;
4863 
4864 /* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
4865    current_function_decl accordingly.  */
4866 
4867 void
4868 push_cfun (struct function *new_cfun)
4869 {
4870   gcc_assert ((!cfun && !current_function_decl)
4871 	      || (cfun && current_function_decl == cfun->decl));
4872   cfun_stack.safe_push (cfun);
4873   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4874   set_cfun (new_cfun);
4875 }
4876 
4877 /* Pop cfun from the stack.  Also set current_function_decl accordingly.  */
4878 
4879 void
4880 pop_cfun (void)
4881 {
4882   struct function *new_cfun = cfun_stack.pop ();
4883   /* When in_dummy_function, we do have a cfun but current_function_decl is
4884      NULL.  We also allow pushing NULL cfun and subsequently changing
4885      current_function_decl to something else and have both restored by
4886      pop_cfun.  */
4887   gcc_checking_assert (in_dummy_function
4888 		       || !cfun
4889 		       || current_function_decl == cfun->decl);
4890   set_cfun (new_cfun);
4891   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4892 }
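/* Illustrative sketch only: a pass that needs to work on another function
   temporarily typically brackets that work with push_cfun/pop_cfun,

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... examine or modify that function ...
     pop_cfun ();

   where other_fndecl is a hypothetical FUNCTION_DECL whose struct function
   has already been allocated.  */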
4893 
4894 /* Return the current value of funcdef_no and increment it.  */
4895 int
4896 get_next_funcdef_no (void)
4897 {
4898   return funcdef_no++;
4899 }
4900 
4901 /* Return the current value of funcdef_no.  */
4902 int
4903 get_last_funcdef_no (void)
4904 {
4905   return funcdef_no;
4906 }
4907 
4908 /* Allocate a function structure for FNDECL and set its contents
4909    to the defaults.  Set cfun to the newly-allocated object.
4910    Some of the helper functions invoked during initialization assume
4911    that cfun has already been set.  Therefore, assign the new object
4912    directly into cfun and invoke the back end hook explicitly at the
4913    very end, rather than initializing a temporary and calling set_cfun
4914    on it.
4915 
4916    ABSTRACT_P is true if this is a function that will never be seen by
4917    the middle-end.  Such functions are front-end concepts (like C++
4918    function templates) that do not correspond directly to functions
4919    placed in object files.  */
4920 
4921 void
4922 allocate_struct_function (tree fndecl, bool abstract_p)
4923 {
4924   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4925 
4926   cfun = ggc_cleared_alloc<function> ();
4927 
4928   init_eh_for_function ();
4929 
4930   if (init_machine_status)
4931     cfun->machine = (*init_machine_status) ();
4932 
4933 #ifdef OVERRIDE_ABI_FORMAT
4934   OVERRIDE_ABI_FORMAT (fndecl);
4935 #endif
4936 
4937   if (fndecl != NULL_TREE)
4938     {
4939       DECL_STRUCT_FUNCTION (fndecl) = cfun;
4940       cfun->decl = fndecl;
4941       current_function_funcdef_no = get_next_funcdef_no ();
4942     }
4943 
4944   invoke_set_current_function_hook (fndecl);
4945 
4946   if (fndecl != NULL_TREE)
4947     {
4948       tree result = DECL_RESULT (fndecl);
4949 
4950       if (!abstract_p)
4951 	{
4952 	  /* Now that we have activated any function-specific attributes
4953 	     that might affect layout, particularly vector modes, relayout
4954 	     each of the parameters and the result.  */
4955 	  relayout_decl (result);
4956 	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4957 	       parm = DECL_CHAIN (parm))
4958 	    relayout_decl (parm);
4959 
4960 	  /* Similarly relayout the function decl.  */
4961 	  targetm.target_option.relayout_function (fndecl);
4962 	}
4963 
4964       if (!abstract_p && aggregate_value_p (result, fndecl))
4965 	{
4966 #ifdef PCC_STATIC_STRUCT_RETURN
4967 	  cfun->returns_pcc_struct = 1;
4968 #endif
4969 	  cfun->returns_struct = 1;
4970 	}
4971 
4972       cfun->stdarg = stdarg_p (fntype);
4973 
4974       /* Assume all registers in stdarg functions need to be saved.  */
4975       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4976       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4977 
4978       /* ??? This could be set on a per-function basis by the front-end
4979          but is this worth the hassle?  */
4980       cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4981       cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4982 
4983       if (!profile_flag && !flag_instrument_function_entry_exit)
4984 	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4985     }
4986 
4987   /* Don't enable begin stmt markers if var-tracking at assignments is
4988      disabled.  The markers make little sense without the variable
4989      binding annotations among them.  */
4990   cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4991     && MAY_HAVE_DEBUG_MARKER_STMTS;
4992 }
4993 
4994 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4995    instead of just setting it.  */
4996 
4997 void
4998 push_struct_function (tree fndecl)
4999 {
5000   /* When in_dummy_function we might be in the middle of a pop_cfun and
5001      current_function_decl and cfun may not match.  */
5002   gcc_assert (in_dummy_function
5003 	      || (!cfun && !current_function_decl)
5004 	      || (cfun && current_function_decl == cfun->decl));
5005   cfun_stack.safe_push (cfun);
5006   current_function_decl = fndecl;
5007   allocate_struct_function (fndecl, false);
5008 }
5009 
5010 /* Reset crtl and other non-struct-function variables to defaults as
5011    appropriate for emitting rtl at the start of a function.  */
5012 
5013 static void
5014 prepare_function_start (void)
5015 {
5016   gcc_assert (!get_last_insn ());
5017   init_temp_slots ();
5018   init_emit ();
5019   init_varasm_status ();
5020   init_expr ();
5021   default_rtl_profile ();
5022 
5023   if (flag_stack_usage_info)
5024     {
5025       cfun->su = ggc_cleared_alloc<stack_usage> ();
5026       cfun->su->static_stack_size = -1;
5027     }
5028 
5029   cse_not_expected = ! optimize;
5030 
5031   /* Caller save not needed yet.  */
5032   caller_save_needed = 0;
5033 
5034   /* We haven't done register allocation yet.  */
5035   reg_renumber = 0;
5036 
5037   /* Indicate that we have not instantiated virtual registers yet.  */
5038   virtuals_instantiated = 0;
5039 
5040   /* Indicate that we want CONCATs now.  */
5041   generating_concat_p = 1;
5042 
5043   /* Indicate we have no need of a frame pointer yet.  */
5044   frame_pointer_needed = 0;
5045 }
5046 
5047 void
5048 push_dummy_function (bool with_decl)
5049 {
5050   tree fn_decl, fn_type, fn_result_decl;
5051 
5052   gcc_assert (!in_dummy_function);
5053   in_dummy_function = true;
5054 
5055   if (with_decl)
5056     {
5057       fn_type = build_function_type_list (void_type_node, NULL_TREE);
5058       fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
5059 			    fn_type);
5060       fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
5061 					 NULL_TREE, void_type_node);
5062       DECL_RESULT (fn_decl) = fn_result_decl;
5063     }
5064   else
5065     fn_decl = NULL_TREE;
5066 
5067   push_struct_function (fn_decl);
5068 }
5069 
5070 /* Initialize the rtl expansion mechanism so that we can do simple things
5071    like generate sequences.  This is used to provide a context during global
5072    initialization of some passes.  You must call expand_dummy_function_end
5073    to exit this context.  */
5074 
5075 void
5076 init_dummy_function_start (void)
5077 {
5078   push_dummy_function (false);
5079   prepare_function_start ();
5080 }
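/* A minimal usage sketch (the emitted insns are made up): code that needs
   an RTL context during global initialization can do

     init_dummy_function_start ();
     start_sequence ();
     ... generate whatever insns are needed ...
     rtx_insn *seq = get_insns ();
     end_sequence ();
     expand_dummy_function_end ();

   expand_dummy_function_end, defined further down in this file, tears the
   dummy context back down.  */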
5081 
5082 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5083    and initialize static variables for generating RTL for the statements
5084    of the function.  */
5085 
5086 void
5087 init_function_start (tree subr)
5088 {
5089   /* Initialize backend, if needed.  */
5090   initialize_rtl ();
5091 
5092   prepare_function_start ();
5093   decide_function_section (subr);
5094 
5095   /* Warn if this value is an aggregate type,
5096      regardless of which calling convention we are using for it.  */
5097   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5098     warning (OPT_Waggregate_return, "function returns an aggregate");
5099 }
5100 
5101 /* Expand code to verify the stack_protect_guard.  This is invoked at
5102    the end of a function to be protected.  */
5103 
5104 void
5105 stack_protect_epilogue (void)
5106 {
5107   tree guard_decl = targetm.stack_protect_guard ();
5108   rtx_code_label *label = gen_label_rtx ();
5109   rtx x, y;
5110   rtx_insn *seq;
5111 
5112   x = expand_normal (crtl->stack_protect_guard);
5113   if (guard_decl)
5114     y = expand_normal (guard_decl);
5115   else
5116     y = const0_rtx;
5117 
5118   /* Allow the target to compare Y with X without leaking either into
5119      a register.  */
5120   if (targetm.have_stack_protect_test ()
5121       && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
5122     emit_insn (seq);
5123   else
5124     emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5125 
5126   /* The noreturn predictor has been moved to the tree level.  The rtl-level
5127      predictors estimate this branch about 20%, which isn't enough to get
5128      things moved out of line.  Since this is the only extant case of adding
5129      a noreturn function at the rtl level, it doesn't seem worth doing anything
5130      except adding the prediction by hand.  */
5131   rtx_insn *tmp = get_last_insn ();
5132   if (JUMP_P (tmp))
5133     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5134 
5135   expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5136   free_temp_slots ();
5137   emit_label (label);
5138 }
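/* Informally, the code emitted above behaves like

     if (frame_guard_copy != reference_guard)
       __stack_chk_fail ();

   with the frame copy coming from crtl->stack_protect_guard, the reference
   guard from the stack_protect_guard target hook, and the failure call
   from targetm.stack_protect_fail.  __stack_chk_fail is only the usual
   library name, used here for illustration.  */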
5139 
5140 /* Start the RTL for a new function, and set variables used for
5141    emitting RTL.
5142    SUBR is the FUNCTION_DECL node.  */
5145 
5146 void
5147 expand_function_start (tree subr)
5148 {
5149   /* Make sure volatile mem refs aren't considered
5150      valid operands of arithmetic insns.  */
5151   init_recog_no_volatile ();
5152 
5153   crtl->profile
5154     = (profile_flag
5155        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5156 
5157   crtl->limit_stack
5158     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5159 
5160   /* Make the label for return statements to jump to.  Do not special
5161      case machines with special return instructions -- they will be
5162      handled later during jump, ifcvt, or epilogue creation.  */
5163   return_label = gen_label_rtx ();
5164 
5165   /* Initialize rtx used to return the value.  */
5166   /* Do this before assign_parms so that we copy the struct value address
5167      before any library calls that assign parms might generate.  */
5168 
5169   /* Decide whether to return the value in memory or in a register.  */
5170   tree res = DECL_RESULT (subr);
5171   if (aggregate_value_p (res, subr))
5172     {
5173       /* Returning something that won't go in a register.  */
5174       rtx value_address = 0;
5175 
5176 #ifdef PCC_STATIC_STRUCT_RETURN
5177       if (cfun->returns_pcc_struct)
5178 	{
5179 	  int size = int_size_in_bytes (TREE_TYPE (res));
5180 	  value_address = assemble_static_space (size);
5181 	}
5182       else
5183 #endif
5184 	{
5185 	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5186 	  /* Expect to be passed the address of a place to store the value.
5187 	     If it is passed as an argument, assign_parms will take care of
5188 	     it.  */
5189 	  if (sv)
5190 	    {
5191 	      value_address = gen_reg_rtx (Pmode);
5192 	      emit_move_insn (value_address, sv);
5193 	    }
5194 	}
5195       if (value_address)
5196 	{
5197 	  rtx x = value_address;
5198 	  if (!DECL_BY_REFERENCE (res))
5199 	    {
5200 	      x = gen_rtx_MEM (DECL_MODE (res), x);
5201 	      set_mem_attributes (x, res, 1);
5202 	    }
5203 	  set_parm_rtl (res, x);
5204 	}
5205     }
5206   else if (DECL_MODE (res) == VOIDmode)
5207     /* If return mode is void, this decl rtl should not be used.  */
5208     set_parm_rtl (res, NULL_RTX);
5209   else
5210     {
5211       /* Compute the return values into a pseudo reg, which we will copy
5212 	 into the true return register after the cleanups are done.  */
5213       tree return_type = TREE_TYPE (res);
5214 
5215       /* If we may coalesce this result, make sure it has the expected mode
5216 	 in case it was promoted.  But we need not bother about BLKmode.  */
5217       machine_mode promoted_mode
5218 	= flag_tree_coalesce_vars && is_gimple_reg (res)
5219 	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5220 	  : BLKmode;
5221 
5222       if (promoted_mode != BLKmode)
5223 	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5224       else if (TYPE_MODE (return_type) != BLKmode
5225 	       && targetm.calls.return_in_msb (return_type))
5226 	/* expand_function_end will insert the appropriate padding in
5227 	   this case.  Use the return value's natural (unpadded) mode
5228 	   within the function proper.  */
5229 	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5230       else
5231 	{
5232 	  /* In order to figure out what mode to use for the pseudo, we
5233 	     figure out what the mode of the eventual return register will
5234 	     actually be, and use that.  */
5235 	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5236 
5237 	  /* Structures that are returned in registers are not
5238 	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
5239 	  if (REG_P (hard_reg))
5240 	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5241 	  else
5242 	    {
5243 	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5244 	      set_parm_rtl (res, gen_group_rtx (hard_reg));
5245 	    }
5246 	}
5247 
5248       /* Set DECL_REGISTER flag so that expand_function_end will copy the
5249 	 result to the real return register(s).  */
5250       DECL_REGISTER (res) = 1;
5251 
5252       if (chkp_function_instrumented_p (current_function_decl))
5253 	{
5254 	  tree return_type = TREE_TYPE (res);
5255 	  rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5256 								 subr, 1);
5257 	  SET_DECL_BOUNDS_RTL (res, bounds);
5258 	}
5259     }
5260 
5261   /* Initialize rtx for parameters and local variables.
5262      In some cases this requires emitting insns.  */
5263   assign_parms (subr);
5264 
5265   /* If function gets a static chain arg, store it.  */
5266   if (cfun->static_chain_decl)
5267     {
5268       tree parm = cfun->static_chain_decl;
5269       rtx local, chain;
5270       rtx_insn *insn;
5271       int unsignedp;
5272 
5273       local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5274       chain = targetm.calls.static_chain (current_function_decl, true);
5275 
5276       set_decl_incoming_rtl (parm, chain, false);
5277       set_parm_rtl (parm, local);
5278       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5279 
5280       if (GET_MODE (local) != GET_MODE (chain))
5281 	{
5282 	  convert_move (local, chain, unsignedp);
5283 	  insn = get_last_insn ();
5284 	}
5285       else
5286 	insn = emit_move_insn (local, chain);
5287 
5288       /* Mark the register as eliminable, similar to parameters.  */
5289       if (MEM_P (chain)
5290 	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5291 	set_dst_reg_note (insn, REG_EQUIV, chain, local);
5292 
5293       /* If we aren't optimizing, save the static chain onto the stack.  */
5294       if (!optimize)
5295 	{
5296 	  tree saved_static_chain_decl
5297 	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5298 			  DECL_NAME (parm), TREE_TYPE (parm));
5299 	  rtx saved_static_chain_rtx
5300 	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5301 	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5302 	  emit_move_insn (saved_static_chain_rtx, chain);
5303 	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5304 	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
5305 	}
5306     }
5307 
5308   /* The following was moved from init_function_start.
5309      The move was supposed to make sdb output more accurate.  */
5310   /* Indicate the beginning of the function body,
5311      as opposed to parm setup.  */
5312   emit_note (NOTE_INSN_FUNCTION_BEG);
5313 
5314   gcc_assert (NOTE_P (get_last_insn ()));
5315 
5316   parm_birth_insn = get_last_insn ();
5317 
5318   /* If the function receives a non-local goto, then store the
5319      bits we need to restore the frame pointer.  */
5320   if (cfun->nonlocal_goto_save_area)
5321     {
5322       tree t_save;
5323       rtx r_save;
5324 
5325       tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5326       gcc_assert (DECL_RTL_SET_P (var));
5327 
5328       t_save = build4 (ARRAY_REF,
5329 		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5330 		       cfun->nonlocal_goto_save_area,
5331 		       integer_zero_node, NULL_TREE, NULL_TREE);
5332       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5333       gcc_assert (GET_MODE (r_save) == Pmode);
5334 
5335       emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5336       update_nonlocal_goto_save_area ();
5337     }
5338 
5339   if (crtl->profile)
5340     {
5341 #ifdef PROFILE_HOOK
5342       PROFILE_HOOK (current_function_funcdef_no);
5343 #endif
5344     }
5345 
5346   /* If we are doing generic stack checking, the probe should go here.  */
5347   if (flag_stack_check == GENERIC_STACK_CHECK)
5348     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5349 }
5350 
5351 void
5352 pop_dummy_function (void)
5353 {
5354   pop_cfun ();
5355   in_dummy_function = false;
5356 }
5357 
5358 /* Undo the effects of init_dummy_function_start.  */
5359 void
5360 expand_dummy_function_end (void)
5361 {
5362   gcc_assert (in_dummy_function);
5363 
5364   /* End any sequences that failed to be closed due to syntax errors.  */
5365   while (in_sequence_p ())
5366     end_sequence ();
5367 
5368   /* Outside function body, can't compute type's actual size
5369      until next function's body starts.  */
5370 
5371   free_after_parsing (cfun);
5372   free_after_compilation (cfun);
5373   pop_dummy_function ();
5374 }
5375 
5376 /* Helper for diddle_return_value.  */
5377 
5378 void
5379 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5380 {
5381   if (! outgoing)
5382     return;
5383 
5384   if (REG_P (outgoing))
5385     (*doit) (outgoing, arg);
5386   else if (GET_CODE (outgoing) == PARALLEL)
5387     {
5388       int i;
5389 
5390       for (i = 0; i < XVECLEN (outgoing, 0); i++)
5391 	{
5392 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5393 
5394 	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5395 	    (*doit) (x, arg);
5396 	}
5397     }
5398 }
5399 
5400 /* Call DOIT for each hard register used as a return value from
5401    the current function.  */
5402 
5403 void
5404 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5405 {
5406   diddle_return_value_1 (doit, arg, crtl->return_bnd);
5407   diddle_return_value_1 (doit, arg, crtl->return_rtx);
5408 }
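/* For example, clobber_return_register and use_return_register below pass
   do_clobber_return_reg resp. do_use_return_reg as DOIT, emitting a CLOBBER
   or USE for each hard register that carries the return value.  */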
5409 
5410 static void
5411 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5412 {
5413   emit_clobber (reg);
5414 }
5415 
5416 void
5417 clobber_return_register (void)
5418 {
5419   diddle_return_value (do_clobber_return_reg, NULL);
5420 
5421   /* In case a pseudo is used to return the value, clobber it too.  */
5422   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5423     {
5424       tree decl_result = DECL_RESULT (current_function_decl);
5425       rtx decl_rtl = DECL_RTL (decl_result);
5426       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5427 	{
5428 	  do_clobber_return_reg (decl_rtl, NULL);
5429 	}
5430     }
5431 }
5432 
5433 static void
5434 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5435 {
5436   emit_use (reg);
5437 }
5438 
5439 static void
5440 use_return_register (void)
5441 {
5442   diddle_return_value (do_use_return_reg, NULL);
5443 }
5444 
5445 /* Set the location of the insn chain starting at INSN to LOC.  */
5446 
5447 static void
5448 set_insn_locations (rtx_insn *insn, int loc)
5449 {
5450   while (insn != NULL)
5451     {
5452       if (INSN_P (insn))
5453 	INSN_LOCATION (insn) = loc;
5454       insn = NEXT_INSN (insn);
5455     }
5456 }
5457 
5458 /* Generate RTL for the end of the current function.  */
5459 
5460 void
5461 expand_function_end (void)
5462 {
5463   /* If arg_pointer_save_area was referenced only from a nested
5464      function, we will not have initialized it yet.  Do that now.  */
5465   if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5466     get_arg_pointer_save_area ();
5467 
5468   /* If we are doing generic stack checking and this function makes calls,
5469      do a stack probe at the start of the function to ensure we have enough
5470      space for another stack frame.  */
5471   if (flag_stack_check == GENERIC_STACK_CHECK)
5472     {
5473       rtx_insn *insn, *seq;
5474 
5475       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5476 	if (CALL_P (insn))
5477 	  {
5478 	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5479 	    start_sequence ();
5480 	    if (STACK_CHECK_MOVING_SP)
5481 	      anti_adjust_stack_and_probe (max_frame_size, true);
5482 	    else
5483 	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5484 	    seq = get_insns ();
5485 	    end_sequence ();
5486 	    set_insn_locations (seq, prologue_location);
5487 	    emit_insn_before (seq, stack_check_probe_note);
5488 	    break;
5489 	  }
5490     }
5491 
5492   /* End any sequences that failed to be closed due to syntax errors.  */
5493   while (in_sequence_p ())
5494     end_sequence ();
5495 
5496   clear_pending_stack_adjust ();
5497   do_pending_stack_adjust ();
5498 
5499   /* Output a line number for the end of the function.
5500      SDB depended on this.  */
5501   set_curr_insn_location (input_location);
5502 
5503   /* Before the return label (if any), clobber the return
5504      registers so that they are not propagated live to the rest of
5505      the function.  This can only happen with functions that drop
5506      through; if there had been a return statement, there would
5507      have either been a return rtx, or a jump to the return label.
5508 
5509      We delay actual code generation after the current_function_value_rtx
5510      is computed.  */
5511   rtx_insn *clobber_after = get_last_insn ();
5512 
5513   /* Output the label for the actual return from the function.  */
5514   emit_label (return_label);
5515 
5516   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5517     {
5518       /* Let except.c know where it should emit the call to unregister
5519 	 the function context for sjlj exceptions.  */
5520       if (flag_exceptions)
5521 	sjlj_emit_function_exit_after (get_last_insn ());
5522     }
5523   else
5524     {
5525       /* We want to ensure that instructions that may trap are not
5526 	 moved into the epilogue by scheduling, because we don't
5527 	 always emit unwind information for the epilogue.  */
5528       if (cfun->can_throw_non_call_exceptions)
5529 	emit_insn (gen_blockage ());
5530     }
5531 
5532   /* If this is an implementation of throw, do what's necessary to
5533      communicate between __builtin_eh_return and the epilogue.  */
5534   expand_eh_return ();
5535 
5536   /* If scalar return value was computed in a pseudo-reg, or was a named
5537      return value that got dumped to the stack, copy that to the hard
5538      return register.  */
5539   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5540     {
5541       tree decl_result = DECL_RESULT (current_function_decl);
5542       rtx decl_rtl = DECL_RTL (decl_result);
5543 
5544       if (REG_P (decl_rtl)
5545 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5546 	  : DECL_REGISTER (decl_result))
5547 	{
5548 	  rtx real_decl_rtl = crtl->return_rtx;
5549 	  complex_mode cmode;
5550 
5551 	  /* This should be set in assign_parms.  */
5552 	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5553 
5554 	  /* If this is a BLKmode structure being returned in registers,
5555 	     then use the mode computed in expand_return.  Note that if
5556 	     decl_rtl is memory, then its mode may have been changed,
5557 	     but that crtl->return_rtx has not.  */
5558 	  if (GET_MODE (real_decl_rtl) == BLKmode)
5559 	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5560 
5561 	  /* If a non-BLKmode return value should be padded at the least
5562 	     significant end of the register, shift it left by the appropriate
5563 	     amount.  BLKmode results are handled using the group load/store
5564 	     machinery.  */
5565 	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5566 	      && REG_P (real_decl_rtl)
5567 	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5568 	    {
5569 	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5570 					   REGNO (real_decl_rtl)),
5571 			      decl_rtl);
5572 	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5573 	    }
5574 	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
5575 	    {
5576 	      /* If expand_function_start has created a PARALLEL for decl_rtl,
5577 		 move the result to the real return registers.  Otherwise, do
5578 		 a group load from decl_rtl for a named return.  */
5579 	      if (GET_CODE (decl_rtl) == PARALLEL)
5580 		emit_group_move (real_decl_rtl, decl_rtl);
5581 	      else
5582 		emit_group_load (real_decl_rtl, decl_rtl,
5583 				 TREE_TYPE (decl_result),
5584 				 int_size_in_bytes (TREE_TYPE (decl_result)));
5585 	    }
5586 	  /* In the case of complex integer modes smaller than a word, we'll
5587 	     need to generate some non-trivial bitfield insertions.  Do that
5588 	     on a pseudo and not the hard register.  */
5589 	  else if (GET_CODE (decl_rtl) == CONCAT
5590 		   && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5591 		   && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5592 	    {
5593 	      int old_generating_concat_p;
5594 	      rtx tmp;
5595 
5596 	      old_generating_concat_p = generating_concat_p;
5597 	      generating_concat_p = 0;
5598 	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5599 	      generating_concat_p = old_generating_concat_p;
5600 
5601 	      emit_move_insn (tmp, decl_rtl);
5602 	      emit_move_insn (real_decl_rtl, tmp);
5603 	    }
5604 	  /* If a named return value dumped decl_rtl to memory, then
5605 	     we may need to re-do the PROMOTE_MODE signed/unsigned
5606 	     extension.  */
5607 	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5608 	    {
5609 	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5610 	      promote_function_mode (TREE_TYPE (decl_result),
5611 				     GET_MODE (decl_rtl), &unsignedp,
5612 				     TREE_TYPE (current_function_decl), 1);
5613 
5614 	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
5615 	    }
5616 	  else
5617 	    emit_move_insn (real_decl_rtl, decl_rtl);
5618 	}
5619     }
5620 
5621   /* If returning a structure, arrange to return the address of the value
5622      in a place where debuggers expect to find it.
5623 
5624      If returning a structure PCC style,
5625      the caller also depends on this value.
5626      And cfun->returns_pcc_struct is not necessarily set.  */
5627   if ((cfun->returns_struct || cfun->returns_pcc_struct)
5628       && !targetm.calls.omit_struct_return_reg)
5629     {
5630       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5631       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5632       rtx outgoing;
5633 
5634       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5635 	type = TREE_TYPE (type);
5636       else
5637 	value_address = XEXP (value_address, 0);
5638 
5639       outgoing = targetm.calls.function_value (build_pointer_type (type),
5640 					       current_function_decl, true);
5641 
5642       /* Mark this as a function return value so integrate will delete the
5643 	 assignment and USE below when inlining this function.  */
5644       REG_FUNCTION_VALUE_P (outgoing) = 1;
5645 
5646       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
5647       scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5648       value_address = convert_memory_address (mode, value_address);
5649 
5650       emit_move_insn (outgoing, value_address);
5651 
5652       /* Show return register used to hold result (in this case the address
5653 	 of the result).  */
5654       crtl->return_rtx = outgoing;
5655     }
5656 
5657   /* Emit the actual code to clobber return register.  Don't emit
5658      it if clobber_after is a barrier, then the previous basic block
5659      certainly doesn't fall thru into the exit block.  */
5660   if (!BARRIER_P (clobber_after))
5661     {
5662       start_sequence ();
5663       clobber_return_register ();
5664       rtx_insn *seq = get_insns ();
5665       end_sequence ();
5666 
5667       emit_insn_after (seq, clobber_after);
5668     }
5669 
5670   /* Output the label for the naked return from the function.  */
5671   if (naked_return_label)
5672     emit_label (naked_return_label);
5673 
5674   /* @@@ This is a kludge.  We want to ensure that instructions that
5675      may trap are not moved into the epilogue by scheduling, because
5676      we don't always emit unwind information for the epilogue.  */
5677   if (cfun->can_throw_non_call_exceptions
5678       && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5679     emit_insn (gen_blockage ());
5680 
5681   /* If stack protection is enabled for this function, check the guard.  */
5682   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
5683     stack_protect_epilogue ();
5684 
5685   /* If we had calls to alloca, and this machine needs
5686      an accurate stack pointer to exit the function,
5687      insert some code to save and restore the stack pointer.  */
5688   if (! EXIT_IGNORE_STACK
5689       && cfun->calls_alloca)
5690     {
5691       rtx tem = 0;
5692 
5693       start_sequence ();
5694       emit_stack_save (SAVE_FUNCTION, &tem);
5695       rtx_insn *seq = get_insns ();
5696       end_sequence ();
5697       emit_insn_before (seq, parm_birth_insn);
5698 
5699       emit_stack_restore (SAVE_FUNCTION, tem);
5700     }
5701 
5702   /* ??? This should no longer be necessary since stupid is no longer with
5703      us, but there are some parts of the compiler (eg reload_combine, and
5704      sh mach_dep_reorg) that still try and compute their own lifetime info
5705      instead of using the general framework.  */
5706   use_return_register ();
5707 }
5708 
5709 rtx
5710 get_arg_pointer_save_area (void)
5711 {
5712   rtx ret = arg_pointer_save_area;
5713 
5714   if (! ret)
5715     {
5716       ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5717       arg_pointer_save_area = ret;
5718     }
5719 
5720   if (! crtl->arg_pointer_save_area_init)
5721     {
5722       /* Save the arg pointer at the beginning of the function.  The
5723 	 generated stack slot may not be a valid memory address, so we
5724 	 have to check it and fix it if necessary.  */
5725       start_sequence ();
5726       emit_move_insn (validize_mem (copy_rtx (ret)),
5727                       crtl->args.internal_arg_pointer);
5728       rtx_insn *seq = get_insns ();
5729       end_sequence ();
5730 
5731       push_topmost_sequence ();
5732       emit_insn_after (seq, entry_of_function ());
5733       pop_topmost_sequence ();
5734 
5735       crtl->arg_pointer_save_area_init = true;
5736     }
5737 
5738   return ret;
5739 }
5740 
5741 
5742 /* If debugging dumps are requested, dump information about how the
5743    target handled -fstack-check=clash for the prologue.
5744 
5745    PROBES describes what probes, if any, were emitted.
5746 
5747    RESIDUALS indicates if the prologue had any residual allocation
5748    (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */
5749 
5750 void
5751 dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5752 {
5753   if (!dump_file)
5754     return;
5755 
5756   switch (probes)
5757     {
5758     case NO_PROBE_NO_FRAME:
5759       fprintf (dump_file,
5760 	       "Stack clash no probe no stack adjustment in prologue.\n");
5761       break;
5762     case NO_PROBE_SMALL_FRAME:
5763       fprintf (dump_file,
5764 	       "Stack clash no probe small stack adjustment in prologue.\n");
5765       break;
5766     case PROBE_INLINE:
5767       fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5768       break;
5769     case PROBE_LOOP:
5770       fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5771       break;
5772     }
5773 
5774   if (residuals)
5775     fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5776   else
5777     fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5778 
5779   if (frame_pointer_needed)
5780     fprintf (dump_file, "Stack clash frame pointer needed.\n");
5781   else
5782     fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5783 
5784   if (TREE_THIS_VOLATILE (cfun->decl))
5785     fprintf (dump_file,
5786 	     "Stack clash noreturn prologue, assuming no implicit"
5787 	     " probes in caller.\n");
5788   else
5789     fprintf (dump_file,
5790 	     "Stack clash not noreturn prologue.\n");
5791 }
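/* For instance, a target that emitted inline probes for a frame that is an
   exact multiple of the probing interval would produce a dump fragment like

     Stack clash inline probes in prologue.
     Stack clash no residual allocation in prologue.
     Stack clash no frame pointer needed.
     Stack clash not noreturn prologue.

   (assembled from the strings above; the exact lines depend on the
   target's prologue decisions).  */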
5792 
5793 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5794    for the first time.  */
5795 
5796 static void
5797 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5798 {
5799   rtx_insn *tmp;
5800   hash_table<insn_cache_hasher> *hash = *hashp;
5801 
5802   if (hash == NULL)
5803     *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5804 
5805   for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5806     {
5807       rtx *slot = hash->find_slot (tmp, INSERT);
5808       gcc_assert (*slot == NULL);
5809       *slot = tmp;
5810     }
5811 }
5812 
5813 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5814    basic block, splitting or peepholes.  If INSN is a prologue or epilogue
5815    insn, then record COPY as well.  */
5816 
5817 void
5818 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5819 {
5820   hash_table<insn_cache_hasher> *hash;
5821   rtx *slot;
5822 
5823   hash = epilogue_insn_hash;
5824   if (!hash || !hash->find (insn))
5825     {
5826       hash = prologue_insn_hash;
5827       if (!hash || !hash->find (insn))
5828 	return;
5829     }
5830 
5831   slot = hash->find_slot (copy, INSERT);
5832   gcc_assert (*slot == NULL);
5833   *slot = copy;
5834 }
5835 
5836 /* Determine if any INSNs in HASH are, or are part of, INSN.  Because
5837    we can be running after reorg, SEQUENCE rtl is possible.  */
5838 
5839 static bool
5840 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5841 {
5842   if (hash == NULL)
5843     return false;
5844 
5845   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5846     {
5847       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5848       int i;
5849       for (i = seq->len () - 1; i >= 0; i--)
5850 	if (hash->find (seq->element (i)))
5851 	  return true;
5852       return false;
5853     }
5854 
5855   return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5856 }
5857 
5858 int
5859 prologue_contains (const rtx_insn *insn)
5860 {
5861   return contains (insn, prologue_insn_hash);
5862 }
5863 
5864 int
5865 epilogue_contains (const rtx_insn *insn)
5866 {
5867   return contains (insn, epilogue_insn_hash);
5868 }
5869 
5870 int
5871 prologue_epilogue_contains (const rtx_insn *insn)
5872 {
5873   if (contains (insn, prologue_insn_hash))
5874     return 1;
5875   if (contains (insn, epilogue_insn_hash))
5876     return 1;
5877   return 0;
5878 }
5879 
5880 void
5881 record_prologue_seq (rtx_insn *seq)
5882 {
5883   record_insns (seq, NULL, &prologue_insn_hash);
5884 }
5885 
5886 void
5887 record_epilogue_seq (rtx_insn *seq)
5888 {
5889   record_insns (seq, NULL, &epilogue_insn_hash);
5890 }
5891 
5892 /* Set JUMP_LABEL for a return insn.  */
5893 
5894 void
5895 set_return_jump_label (rtx_insn *returnjump)
5896 {
5897   rtx pat = PATTERN (returnjump);
5898   if (GET_CODE (pat) == PARALLEL)
5899     pat = XVECEXP (pat, 0, 0);
5900   if (ANY_RETURN_P (pat))
5901     JUMP_LABEL (returnjump) = pat;
5902   else
5903     JUMP_LABEL (returnjump) = ret_rtx;
5904 }
5905 
5906 /* Return a sequence to be used as the split prologue for the current
5907    function, or NULL.  */
5908 
5909 static rtx_insn *
5910 make_split_prologue_seq (void)
5911 {
5912   if (!flag_split_stack
5913       || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5914     return NULL;
5915 
5916   start_sequence ();
5917   emit_insn (targetm.gen_split_stack_prologue ());
5918   rtx_insn *seq = get_insns ();
5919   end_sequence ();
5920 
5921   record_insns (seq, NULL, &prologue_insn_hash);
5922   set_insn_locations (seq, prologue_location);
5923 
5924   return seq;
5925 }
5926 
5927 /* Return a sequence to be used as the prologue for the current function,
5928    or NULL.  */
5929 
5930 static rtx_insn *
5931 make_prologue_seq (void)
5932 {
5933   if (!targetm.have_prologue ())
5934     return NULL;
5935 
5936   start_sequence ();
5937   rtx_insn *seq = targetm.gen_prologue ();
5938   emit_insn (seq);
5939 
5940   /* Insert an explicit USE for the frame pointer
5941      if the profiling is on and the frame pointer is required.  */
5942   if (crtl->profile && frame_pointer_needed)
5943     emit_use (hard_frame_pointer_rtx);
5944 
5945   /* Retain a map of the prologue insns.  */
5946   record_insns (seq, NULL, &prologue_insn_hash);
5947   emit_note (NOTE_INSN_PROLOGUE_END);
5948 
5949   /* Ensure that instructions are not moved into the prologue when
5950      profiling is on.  The call to the profiling routine can be
5951      emitted within the live range of a call-clobbered register.  */
5952   if (!targetm.profile_before_prologue () && crtl->profile)
5953     emit_insn (gen_blockage ());
5954 
5955   seq = get_insns ();
5956   end_sequence ();
5957   set_insn_locations (seq, prologue_location);
5958 
5959   return seq;
5960 }
5961 
5962 /* Return a sequence to be used as the epilogue for the current function,
5963    or NULL.  */
5964 
5965 static rtx_insn *
5966 make_epilogue_seq (void)
5967 {
5968   if (!targetm.have_epilogue ())
5969     return NULL;
5970 
5971   start_sequence ();
5972   emit_note (NOTE_INSN_EPILOGUE_BEG);
5973   rtx_insn *seq = targetm.gen_epilogue ();
5974   if (seq)
5975     emit_jump_insn (seq);
5976 
5977   /* Retain a map of the epilogue insns.  */
5978   record_insns (seq, NULL, &epilogue_insn_hash);
5979   set_insn_locations (seq, epilogue_location);
5980 
5981   seq = get_insns ();
5982   rtx_insn *returnjump = get_last_insn ();
5983   end_sequence ();
5984 
5985   if (JUMP_P (returnjump))
5986     set_return_jump_label (returnjump);
5987 
5988   return seq;
5989 }
5990 
5991 
5992 /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
5993    this into place with notes indicating where the prologue ends and where
5994    the epilogue begins.  Update the basic block information when possible.
5995 
5996    Notes on epilogue placement:
5997    There are several kinds of edges to the exit block:
5998    * a single fallthru edge from LAST_BB
5999    * possibly, edges from blocks containing sibcalls
6000    * possibly, fake edges from infinite loops
6001 
6002    The epilogue is always emitted on the fallthru edge from the last basic
6003    block in the function, LAST_BB, into the exit block.
6004 
6005    If LAST_BB is empty except for a label, it is the target of every
6006    other basic block in the function that ends in a return.  If a
6007    target has a return or simple_return pattern (possibly with
6008    conditional variants), these basic blocks can be changed so that a
6009    return insn is emitted into them, and their target is adjusted to
6010    the real exit block.
6011 
6012    Notes on shrink wrapping: We implement a fairly conservative
6013    version of shrink-wrapping rather than the textbook one.  We only
6014    generate a single prologue and a single epilogue.  This is
6015    sufficient to catch a number of interesting cases involving early
6016    exits.
6017 
6018    First, we identify the blocks that require the prologue to occur before
6019    them.  These are the ones that modify a call-saved register, or reference
6020    any of the stack or frame pointer registers.  To simplify things, we then
6021    mark everything reachable from these blocks as also requiring a prologue.
6022    This takes care of loops automatically, and avoids the need to examine
6023    whether MEMs reference the frame, since it is sufficient to check for
6024    occurrences of the stack or frame pointer.
6025 
6026    We then compute the set of blocks for which the need for a prologue
6027    is anticipatable (borrowing terminology from the shrink-wrapping
6028    description in Muchnick's book).  These are the blocks which either
6029    require a prologue themselves, or those that have only successors
6030    where the prologue is anticipatable.  The prologue needs to be
6031    inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
6032    is not.  For the moment, we ensure that only one such edge exists.
6033 
6034    The epilogue is placed as described above, but we make a
6035    distinction between inserting return and simple_return patterns
6036    when modifying other blocks that end in a return.  Blocks that end
6037    in a sibcall omit the sibcall_epilogue if the block is not in
6038    ANTIC.  */
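/* As a concrete illustration of what this conservative shrink-wrapping
   buys (the function below is made up for the sketch):

     int
     f (int *p)
     {
       if (!p)
	 return -1;              <- needs neither prologue nor epilogue
       return g (p) + h (p);     <- the calls clobber call-saved registers
     }

   Only the path containing the calls requires the prologue, so the
   prologue can be sunk onto the edge leading to that block, and the early
   "return -1" path runs without any frame setup.  */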
6039 
6040 void
6041 thread_prologue_and_epilogue_insns (void)
6042 {
6043   df_analyze ();
6044 
6045   /* Can't deal with multiple successors of the entry block at the
6046      moment.  Function should always have at least one entry
6047      point.  */
6048   gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6049 
6050   edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6051   edge orig_entry_edge = entry_edge;
6052 
6053   rtx_insn *split_prologue_seq = make_split_prologue_seq ();
6054   rtx_insn *prologue_seq = make_prologue_seq ();
6055   rtx_insn *epilogue_seq = make_epilogue_seq ();
6056 
6057   /* Try to perform a kind of shrink-wrapping, making sure the
6058      prologue/epilogue is emitted only around those parts of the
6059      function that require it.  */
6060   try_shrink_wrapping (&entry_edge, prologue_seq);
6061 
6062   /* If the target can handle splitting the prologue/epilogue into separate
6063      components, try to shrink-wrap these components separately.  */
6064   try_shrink_wrapping_separate (entry_edge->dest);
6065 
6066   /* If that did anything for any component, we now need to generate the
6067      "main" prologue again.  Because some targets require some of these
6068      to be called in a specific order (i386 requires the split prologue
6069      to be first, for example), we create all three sequences again here.
6070      If this does not work for some target, that target should not enable
6071      separate shrink-wrapping.  */
6072   if (crtl->shrink_wrapped_separate)
6073     {
6074       split_prologue_seq = make_split_prologue_seq ();
6075       prologue_seq = make_prologue_seq ();
6076       epilogue_seq = make_epilogue_seq ();
6077     }
6078 
6079   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6080 
6081   /* A small fib -- epilogue is not yet completed, but we wish to re-use
6082      this marker for the splits of EH_RETURN patterns, and nothing else
6083      uses the flag in the meantime.  */
6084   epilogue_completed = 1;
6085 
6086   /* Find non-fallthru edges that end with EH_RETURN instructions.  On
6087      some targets, these get split to a special version of the epilogue
6088      code.  In order to be able to properly annotate these with unwind
6089      info, try to split them now.  If we get a valid split, drop an
6090      EPILOGUE_BEG note and mark the insns as epilogue insns.  */
6091   edge e;
6092   edge_iterator ei;
6093   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6094     {
6095       rtx_insn *prev, *last, *trial;
6096 
6097       if (e->flags & EDGE_FALLTHRU)
6098 	continue;
6099       last = BB_END (e->src);
6100       if (!eh_returnjump_p (last))
6101 	continue;
6102 
6103       prev = PREV_INSN (last);
6104       trial = try_split (PATTERN (last), last, 1);
6105       if (trial == last)
6106 	continue;
6107 
6108       record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6109       emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6110     }
6111 
6112   edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6113 
6114   if (exit_fallthru_edge)
6115     {
6116       if (epilogue_seq)
6117 	{
6118 	  insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
6119 	  commit_edge_insertions ();
6120 
6121 	  /* The epilogue insns we inserted may cause the exit edge to no longer
6122 	     be fallthru.  */
6123 	  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6124 	    {
6125 	      if (((e->flags & EDGE_FALLTHRU) != 0)
6126 		  && returnjump_p (BB_END (e->src)))
6127 		e->flags &= ~EDGE_FALLTHRU;
6128 	    }
6129 	}
6130       else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
6131 	{
6132 	  /* We have a fall-through edge to the exit block, the source is not
6133 	     at the end of the function, and there will be an assembler epilogue
6134 	     at the end of the function.
6135 	     We can't use force_nonfallthru here, because that would try to
6136 	     use return.  Inserting a jump 'by hand' is extremely messy, so
6137 	     we take advantage of cfg_layout_finalize using
6138 	     fixup_fallthru_exit_predecessor.  */
6139 	  cfg_layout_initialize (0);
6140 	  basic_block cur_bb;
6141 	  FOR_EACH_BB_FN (cur_bb, cfun)
6142 	    if (cur_bb->index >= NUM_FIXED_BLOCKS
6143 		&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6144 	      cur_bb->aux = cur_bb->next_bb;
6145 	  cfg_layout_finalize ();
6146 	}
6147     }
6148 
6149   /* Insert the prologue.  */
6150 
6151   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6152 
6153   if (split_prologue_seq || prologue_seq)
6154     {
6155       rtx_insn *split_prologue_insn = split_prologue_seq;
6156       if (split_prologue_seq)
6157 	{
6158 	  while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6159 	    split_prologue_insn = NEXT_INSN (split_prologue_insn);
6160 	  insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6161 	}
6162 
6163       rtx_insn *prologue_insn = prologue_seq;
6164       if (prologue_seq)
6165 	{
6166 	  while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6167 	    prologue_insn = NEXT_INSN (prologue_insn);
6168 	  insert_insn_on_edge (prologue_seq, entry_edge);
6169 	}
6170 
6171       commit_edge_insertions ();
6172 
6173       /* Look for basic blocks within the prologue insns.  */
6174       if (split_prologue_insn
6175 	  && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
6176 	split_prologue_insn = NULL;
6177       if (prologue_insn
6178 	  && BLOCK_FOR_INSN (prologue_insn) == NULL)
6179 	prologue_insn = NULL;
6180       if (split_prologue_insn || prologue_insn)
6181 	{
6182 	  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
6183 	  bitmap_clear (blocks);
6184 	  if (split_prologue_insn)
6185 	    bitmap_set_bit (blocks,
6186 			    BLOCK_FOR_INSN (split_prologue_insn)->index);
6187 	  if (prologue_insn)
6188 	    bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
6189 	  find_many_sub_basic_blocks (blocks);
6190 	}
6191     }
6192 
6193   default_rtl_profile ();
6194 
6195   /* Emit sibling epilogues before any sibling call sites.  */
6196   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6197        (e = ei_safe_edge (ei));
6198        ei_next (&ei))
6199     {
6200       /* Skip those already handled, the ones that run without prologue.  */
6201       if (e->flags & EDGE_IGNORE)
6202 	{
6203 	  e->flags &= ~EDGE_IGNORE;
6204 	  continue;
6205 	}
6206 
6207       rtx_insn *insn = BB_END (e->src);
6208 
6209       if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6210 	continue;
6211 
6212       if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6213 	{
6214 	  start_sequence ();
6215 	  emit_note (NOTE_INSN_EPILOGUE_BEG);
6216 	  emit_insn (ep_seq);
6217 	  rtx_insn *seq = get_insns ();
6218 	  end_sequence ();
6219 
6220 	  /* Retain a map of the epilogue insns.  Used in life analysis to
6221 	     avoid getting rid of sibcall epilogue insns.  Do this before we
6222 	     actually emit the sequence.  */
6223 	  record_insns (seq, NULL, &epilogue_insn_hash);
6224 	  set_insn_locations (seq, epilogue_location);
6225 
6226 	  emit_insn_before (seq, insn);
6227 	}
6228     }
6229 
6230   if (epilogue_seq)
6231     {
6232       rtx_insn *insn, *next;
6233 
6234       /* Move any NOTE_INSN_FUNCTION_BEG notes that appear within the
6235          epilogue sequence to just before it, as those can be relevant
6236 	 for debug info generation.  There is no need, however, to insist
6237 	 that such a note exists; if none is found, nothing needs to be
6238 	 moved.  */
6239       for (insn = epilogue_seq; insn; insn = next)
6240 	{
6241 	  next = NEXT_INSN (insn);
6242 	  if (NOTE_P (insn)
6243 	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6244 	    reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6245 	}
6246     }
6247 
6248   /* Threading the prologue and epilogue changes the artificial refs
6249      in the entry and exit blocks.  */
6250   epilogue_completed = 1;
6251   df_update_entry_exit_and_calls ();
6252 }
6253 
6254 /* Reposition the prologue-end and epilogue-begin notes after
6255    instruction scheduling.  */
6256 
6257 void
6258 reposition_prologue_and_epilogue_notes (void)
6259 {
6260   if (!targetm.have_prologue ()
6261       && !targetm.have_epilogue ()
6262       && !targetm.have_sibcall_epilogue ())
6263     return;
6264 
6265   /* Since the hash table is created on demand, the fact that it is
6266      non-null is a signal that it is non-empty.  */
6267   if (prologue_insn_hash != NULL)
6268     {
6269       size_t len = prologue_insn_hash->elements ();
6270       rtx_insn *insn, *last = NULL, *note = NULL;
6271 
6272       /* Scan from the beginning until we reach the last prologue insn.  */
6273       /* ??? While we do have the CFG intact, there are two problems:
6274 	 (1) The prologue can contain loops (typically probing the stack),
6275 	     which means that the end of the prologue isn't in the first bb.
6276 	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
6277       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6278 	{
6279 	  if (NOTE_P (insn))
6280 	    {
6281 	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6282 		note = insn;
6283 	    }
6284 	  else if (contains (insn, prologue_insn_hash))
6285 	    {
6286 	      last = insn;
6287 	      if (--len == 0)
6288 		break;
6289 	    }
6290 	}
6291 
6292       if (last)
6293 	{
6294 	  if (note == NULL)
6295 	    {
6296 	      /* Scan forward looking for the PROLOGUE_END note.  It should
6297 		 be right at the beginning of the block, possibly with other
6298 		 insn notes that got moved there.  */
6299 	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6300 		{
6301 		  if (NOTE_P (note)
6302 		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6303 		    break;
6304 		}
6305 	    }
6306 
6307 	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
6308 	  if (LABEL_P (last))
6309 	    last = NEXT_INSN (last);
6310 	  reorder_insns (note, note, last);
6311 	}
6312     }
6313 
6314   if (epilogue_insn_hash != NULL)
6315     {
6316       edge_iterator ei;
6317       edge e;
6318 
6319       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6320 	{
6321 	  rtx_insn *insn, *first = NULL, *note = NULL;
6322 	  basic_block bb = e->src;
6323 
6324 	  /* Scan from the beginning until we reach the first epilogue insn. */
6325 	  FOR_BB_INSNS (bb, insn)
6326 	    {
6327 	      if (NOTE_P (insn))
6328 		{
6329 		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6330 		    {
6331 		      note = insn;
6332 		      if (first != NULL)
6333 			break;
6334 		    }
6335 		}
6336 	      else if (first == NULL && contains (insn, epilogue_insn_hash))
6337 		{
6338 		  first = insn;
6339 		  if (note != NULL)
6340 		    break;
6341 		}
6342 	    }
6343 
6344 	  if (note)
6345 	    {
6346 	      /* If the function has a single basic block, and no real
6347 		 epilogue insns (e.g. sibcall with no cleanup), the
6348 		 epilogue note can get scheduled before the prologue
6349 		 note.  If we have frame-related prologue insns, having
6350 		 them scanned during the epilogue will result in a crash.
6351 		 In this case re-order the epilogue note to just before
6352 		 the last insn in the block.  */
6353 	      if (first == NULL)
6354 		first = BB_END (bb);
6355 
6356 	      if (PREV_INSN (first) != note)
6357 		reorder_insns (note, note, PREV_INSN (first));
6358 	    }
6359 	}
6360     }
6361 }
6362 
6363 /* Returns the name of the function declared by FNDECL.  */
6364 const char *
6365 fndecl_name (tree fndecl)
6366 {
6367   if (fndecl == NULL)
6368     return "(nofn)";
6369   return lang_hooks.decl_printable_name (fndecl, 1);
6370 }
6371 
6372 /* Returns the name of function FN.  */
6373 const char *
6374 function_name (struct function *fn)
6375 {
6376   tree fndecl = (fn == NULL) ? NULL : fn->decl;
6377   return fndecl_name (fndecl);
6378 }
6379 
6380 /* Returns the name of the current function.  */
6381 const char *
6382 current_function_name (void)
6383 {
6384   return function_name (cfun);
6385 }
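
/* Illustrative sketch (not a call made in this file): these helpers are
   mainly useful for diagnostics and dump output, e.g. a pass might print

       if (dump_file)
	 fprintf (dump_file, ";; processing %s\n", current_function_name ());

   the name being whatever lang_hooks.decl_printable_name returns.  */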
6386 
6387 
6388 static unsigned int
6389 rest_of_handle_check_leaf_regs (void)
6390 {
6391 #ifdef LEAF_REGISTERS
6392   crtl->uses_only_leaf_regs
6393     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6394 #endif
6395   return 0;
6396 }
6397 
6398 /* Insert TYPE into the used-types hash table of FUNC.  */
6399 
6400 static void
6401 used_types_insert_helper (tree type, struct function *func)
6402 {
6403   if (type != NULL && func != NULL)
6404     {
6405       if (func->used_types_hash == NULL)
6406 	func->used_types_hash = hash_set<tree>::create_ggc (37);
6407 
6408       func->used_types_hash->add (type);
6409     }
6410 }
6411 
6412 /* Given a type T, insert it into the used-types hash table of cfun.  */
6413 void
6414 used_types_insert (tree t)
6415 {
6416   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6417     if (TYPE_NAME (t))
6418       break;
6419     else
6420       t = TREE_TYPE (t);
6421   if (TREE_CODE (t) == ERROR_MARK)
6422     return;
6423   if (TYPE_NAME (t) == NULL_TREE
6424       || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6425     t = TYPE_MAIN_VARIANT (t);
6426   if (debug_info_level > DINFO_LEVEL_NONE)
6427     {
6428       if (cfun)
6429 	used_types_insert_helper (t, cfun);
6430       else
6431 	{
6432 	  /* So this might be a type referenced by a global variable.
6433 	     Record that type so that we can later decide to emit its
6434 	     debug information.  */
6435 	  vec_safe_push (types_used_by_cur_var_decl, t);
6436 	}
6437     }
6438 }
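
/* Illustrative sketch only: a front end that wants the type of a used
   declaration DECL to get debug info could record it with

       if (TREE_TYPE (decl) != error_mark_node)
	 used_types_insert (TREE_TYPE (decl));

   (DECL here is hypothetical; the C family front ends make similar calls
   when a declaration is referenced.)  */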
6439 
6440 /* Helper to hash a struct types_used_by_vars_entry.  */
6441 
6442 static hashval_t
6443 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6444 {
6445   gcc_assert (entry && entry->var_decl && entry->type);
6446 
6447   return iterative_hash_object (entry->type,
6448 				iterative_hash_object (entry->var_decl, 0));
6449 }
6450 
6451 /* Hash function of the types_used_by_vars_entry hash table.  */
6452 
6453 hashval_t
6454 used_type_hasher::hash (types_used_by_vars_entry *entry)
6455 {
6456   return hash_types_used_by_vars_entry (entry);
6457 }
6458 
6459 /* Equality function of the types_used_by_vars_entry hash table.  */
6460 
6461 bool
6462 used_type_hasher::equal (types_used_by_vars_entry *e1,
6463 			 types_used_by_vars_entry *e2)
6464 {
6465   return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6466 }
6467 
6468 /* Insert an entry into the types_used_by_vars_hash hash table.  */
6469 
6470 void
6471 types_used_by_var_decl_insert (tree type, tree var_decl)
6472 {
6473   if (type != NULL && var_decl != NULL)
6474     {
6475       types_used_by_vars_entry **slot;
6476       struct types_used_by_vars_entry e;
6477       e.var_decl = var_decl;
6478       e.type = type;
6479       if (types_used_by_vars_hash == NULL)
6480 	types_used_by_vars_hash
6481 	  = hash_table<used_type_hasher>::create_ggc (37);
6482 
6483       slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6484       if (*slot == NULL)
6485 	{
6486 	  struct types_used_by_vars_entry *entry;
6487 	  entry = ggc_alloc<types_used_by_vars_entry> ();
6488 	  entry->type = type;
6489 	  entry->var_decl = var_decl;
6490 	  *slot = entry;
6491 	}
6492     }
6493 }
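
/* Illustrative sketch: for a file-scope variable (cfun is NULL), a front
   end can pair the types collected in types_used_by_cur_var_decl with the
   variable itself, roughly:

       tree t;
       unsigned i;
       FOR_EACH_VEC_SAFE_ELT (types_used_by_cur_var_decl, i, t)
	 types_used_by_var_decl_insert (t, var_decl);

   VAR_DECL here stands for the hypothetical variable being finalized.  */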
6494 
6495 namespace {
6496 
6497 const pass_data pass_data_leaf_regs =
6498 {
6499   RTL_PASS, /* type */
6500   "*leaf_regs", /* name */
6501   OPTGROUP_NONE, /* optinfo_flags */
6502   TV_NONE, /* tv_id */
6503   0, /* properties_required */
6504   0, /* properties_provided */
6505   0, /* properties_destroyed */
6506   0, /* todo_flags_start */
6507   0, /* todo_flags_finish */
6508 };
6509 
6510 class pass_leaf_regs : public rtl_opt_pass
6511 {
6512 public:
6513   pass_leaf_regs (gcc::context *ctxt)
6514     : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6515   {}
6516 
6517   /* opt_pass methods: */
6518   virtual unsigned int execute (function *)
6519     {
6520       return rest_of_handle_check_leaf_regs ();
6521     }
6522 
6523 }; // class pass_leaf_regs
6524 
6525 } // anon namespace
6526 
6527 rtl_opt_pass *
6528 make_pass_leaf_regs (gcc::context *ctxt)
6529 {
6530   return new pass_leaf_regs (ctxt);
6531 }
6532 
6533 static unsigned int
6534 rest_of_handle_thread_prologue_and_epilogue (void)
6535 {
6536   /* prepare_shrink_wrap is sensitive to the block structure of the control
6537      flow graph, so clean it up first.  */
6538   if (optimize)
6539     cleanup_cfg (0);
6540 
6541   /* On some machines, the prologue and epilogue code, or parts thereof,
6542      can be represented as RTL.  Doing so lets us schedule insns between
6543      it and the rest of the code and also allows delayed branch
6544      scheduling to operate in the epilogue.  */
6545   thread_prologue_and_epilogue_insns ();
6546 
6547   /* Some non-cold blocks may now be only reachable from cold blocks.
6548      Fix that up.  */
6549   fixup_partitions ();
6550 
6551   /* Shrink-wrapping can result in unreachable edges in the epilogue,
6552      see PR57320.  */
6553   cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6554 
6555   /* The stack usage info is finalized during prologue expansion.  */
6556   if (flag_stack_usage_info)
6557     output_stack_usage ();
6558 
6559   return 0;
6560 }
6561 
6562 namespace {
6563 
6564 const pass_data pass_data_thread_prologue_and_epilogue =
6565 {
6566   RTL_PASS, /* type */
6567   "pro_and_epilogue", /* name */
6568   OPTGROUP_NONE, /* optinfo_flags */
6569   TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6570   0, /* properties_required */
6571   0, /* properties_provided */
6572   0, /* properties_destroyed */
6573   0, /* todo_flags_start */
6574   ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6575 };
6576 
6577 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6578 {
6579 public:
6580   pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6581     : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6582   {}
6583 
6584   /* opt_pass methods: */
6585   virtual unsigned int execute (function *)
6586     {
6587       return rest_of_handle_thread_prologue_and_epilogue ();
6588     }
6589 
6590 }; // class pass_thread_prologue_and_epilogue
6591 
6592 } // anon namespace
6593 
6594 rtl_opt_pass *
6595 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6596 {
6597   return new pass_thread_prologue_and_epilogue (ctxt);
6598 }
6599 
6600 
6601 /* This mini-pass fixes fall-out from SSA in asm statements that have
6602    in-out constraints.  Say you start with
6603 
6604      orig = inout;
6605      asm ("": "+mr" (inout));
6606      use (orig);
6607 
6608    which is transformed very early to use explicit output and match operands:
6609 
6610      orig = inout;
6611      asm ("": "=mr" (inout) : "0" (inout));
6612      use (orig);
6613 
6614    Or, after SSA and copyprop,
6615 
6616      asm ("": "=mr" (inout_2) : "0" (inout_1));
6617      use (inout_1);
6618 
6619    Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6620    they represent two separate values, so they will get different pseudo
6621    registers during expansion.  Then, since the two operands need to match
6622    per the constraints, but use different pseudo registers, reload can
6623    only register a reload for these operands.  But reloads can only be
6624    satisfied by hardregs, not by memory, so we need a register for this
6625    reload, just because we are presented with non-matching operands.
6626    So, even though we allow memory for this operand, no memory can be
6627    used for it, just because the two operands don't match.  This can
6628    cause reload failures on register-starved targets.
6629 
6630    So it's a symptom of reload not being able to use memory for reloads
6631    or, alternatively, of both operands not coming into
6632    reload as matching (in which case the pseudo could go to memory just
6633    fine, as the alternative allows it, and no reload would be necessary).
6634    We fix the latter problem here, by transforming
6635 
6636      asm ("": "=mr" (inout_2) : "0" (inout_1));
6637 
6638    back to
6639 
6640      inout_2 = inout_1;
6641      asm ("": "=mr" (inout_2) : "0" (inout_2));  */
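
/* A minimal user-level example (illustrative only) of code that triggers
   this transformation:

       int
       bump (int inout)
       {
	 int orig = inout;
	 __asm__ ("" : "+mr" (inout));
	 return orig + inout;
       }

   The "+mr" operand is split into "=mr"/"0" operands early on; after SSA
   they can end up in different pseudos, and the move emitted below lets
   them share one again.  */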
6642 
6643 static void
6644 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6645 {
6646   int i;
6647   bool changed = false;
6648   rtx op = SET_SRC (p_sets[0]);
6649   int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6650   rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6651   bool *output_matched = XALLOCAVEC (bool, noutputs);
6652 
6653   memset (output_matched, 0, noutputs * sizeof (bool));
6654   for (i = 0; i < ninputs; i++)
6655     {
6656       rtx input, output;
6657       rtx_insn *insns;
6658       const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6659       char *end;
6660       int match, j;
6661 
6662       if (*constraint == '%')
6663 	constraint++;
6664 
6665       match = strtoul (constraint, &end, 10);
6666       if (end == constraint)
6667 	continue;
6668 
6669       gcc_assert (match < noutputs);
6670       output = SET_DEST (p_sets[match]);
6671       input = RTVEC_ELT (inputs, i);
6672       /* Only do the transformation for pseudos.  */
6673       if (! REG_P (output)
6674 	  || rtx_equal_p (output, input)
6675 	  || !(REG_P (input) || SUBREG_P (input)
6676 	       || MEM_P (input) || CONSTANT_P (input))
6677 	  || !general_operand (input, GET_MODE (output)))
6678 	continue;
6679 
6680       /* We can't do anything if the output is also used as input,
6681 	 as we're going to overwrite it.  */
6682       for (j = 0; j < ninputs; j++)
6683         if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6684 	  break;
6685       if (j != ninputs)
6686 	continue;
6687 
6688       /* Avoid changing the same input several times.  For
6689 	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6690 	 only change in once (to out1), rather than changing it
6691 	 first to out1 and afterwards to out2.  */
6692       if (i > 0)
6693 	{
6694 	  for (j = 0; j < noutputs; j++)
6695 	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
6696 	      break;
6697 	  if (j != noutputs)
6698 	    continue;
6699 	}
6700       output_matched[match] = true;
6701 
6702       start_sequence ();
6703       emit_move_insn (output, copy_rtx (input));
6704       insns = get_insns ();
6705       end_sequence ();
6706       emit_insn_before (insns, insn);
6707 
6708       /* Now replace all mentions of the input with output.  We can't
6709 	 just replace the occurrence in inputs[i], as the register might
6710 	 also be used in some other input (or even in an address of an
6711 	 output), which would mean possibly increasing the number of
6712 	 inputs by one (namely 'output' in addition), which might pose
6713 	 too complicated a problem for reload to solve.  E.g. this situation:
6714 
6715 	   asm ("" : "=r" (output), "=m" (input) : "0" (input))
6716 
6717 	 Here 'input' is used in two occurrences as input (once for the
6718 	 input operand, once for the address in the second output operand).
6719 	 If we replaced only the occurrence in the input operand (to
6720 	 make the operands match) we would be left with this:
6721 
6722 	   output = input
6723 	   asm ("" : "=r" (output), "=m" (input) : "0" (output))
6724 
6725 	 Now we suddenly have two different input values (containing the same
6726 	 value, but different pseudos) where we formerly had only one.
6727 	 With more complicated asms this might lead to reload failures
6728 	 which wouldn't have happened without this pass.  So, iterate over
6729 	 all operands and replace all occurrences of the register used.  */
6730       for (j = 0; j < noutputs; j++)
6731 	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6732 	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6733 	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6734 					      input, output);
6735       for (j = 0; j < ninputs; j++)
6736 	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6737 	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6738 					       input, output);
6739 
6740       changed = true;
6741     }
6742 
6743   if (changed)
6744     df_insn_rescan (insn);
6745 }
6746 
6747 /* Add the decl D to the local_decls list of FUN.  */
6748 
6749 void
6750 add_local_decl (struct function *fun, tree d)
6751 {
6752   gcc_assert (VAR_P (d));
6753   vec_safe_push (fun->local_decls, d);
6754 }
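
/* Illustrative use (a sketch, not code from this file): when inlining or
   OpenMP lowering creates a new VAR_DECL for function FN, it registers the
   decl so that FOR_EACH_LOCAL_DECL iteration sees it:

       add_local_decl (fn, new_var);

   where NEW_VAR is the hypothetical freshly built variable.  */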
6755 
6756 namespace {
6757 
6758 const pass_data pass_data_match_asm_constraints =
6759 {
6760   RTL_PASS, /* type */
6761   "asmcons", /* name */
6762   OPTGROUP_NONE, /* optinfo_flags */
6763   TV_NONE, /* tv_id */
6764   0, /* properties_required */
6765   0, /* properties_provided */
6766   0, /* properties_destroyed */
6767   0, /* todo_flags_start */
6768   0, /* todo_flags_finish */
6769 };
6770 
6771 class pass_match_asm_constraints : public rtl_opt_pass
6772 {
6773 public:
6774   pass_match_asm_constraints (gcc::context *ctxt)
6775     : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6776   {}
6777 
6778   /* opt_pass methods: */
6779   virtual unsigned int execute (function *);
6780 
6781 }; // class pass_match_asm_constraints
6782 
6783 unsigned
6784 pass_match_asm_constraints::execute (function *fun)
6785 {
6786   basic_block bb;
6787   rtx_insn *insn;
6788   rtx pat, *p_sets;
6789   int noutputs;
6790 
6791   if (!crtl->has_asm_statement)
6792     return 0;
6793 
6794   df_set_flags (DF_DEFER_INSN_RESCAN);
6795   FOR_EACH_BB_FN (bb, fun)
6796     {
6797       FOR_BB_INSNS (bb, insn)
6798 	{
6799 	  if (!INSN_P (insn))
6800 	    continue;
6801 
6802 	  pat = PATTERN (insn);
6803 	  if (GET_CODE (pat) == PARALLEL)
6804 	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6805 	  else if (GET_CODE (pat) == SET)
6806 	    p_sets = &PATTERN (insn), noutputs = 1;
6807 	  else
6808 	    continue;
6809 
6810 	  if (GET_CODE (*p_sets) == SET
6811 	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6812 	    match_asm_constraints_1 (insn, p_sets, noutputs);
6813 	 }
6814     }
6815 
6816   return TODO_df_finish;
6817 }
6818 
6819 } // anon namespace
6820 
6821 rtl_opt_pass *
6822 make_pass_match_asm_constraints (gcc::context *ctxt)
6823 {
6824   return new pass_match_asm_constraints (ctxt);
6825 }
6826 
6827 
6828 #include "gt-function.h"
6829