1 /* Expands front end tree to back end RTL for GCC.
2    Copyright (C) 1987-2016 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file handles the generation of rtl code from tree structure
21    at the level of the function as a whole.
22    It creates the rtl expressions for parameters and auto variables
23    and has full responsibility for allocating stack slots.
24 
25    `expand_function_start' is called at the beginning of a function,
26    before the function body is parsed, and `expand_function_end' is
27    called after parsing the body.
28 
29    Call `assign_stack_local' to allocate a stack slot for a local variable.
30    This is usually done during the RTL generation for the function body,
31    but it can also be done in the reload pass when a pseudo-register does
32    not get a hard register.  */
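/* Illustration only, not compiled: a hedged sketch of the call order this
   file assumes, using the entry points named above.  FNDECL stands for a
   hypothetical FUNCTION_DECL supplied by the front end.  */
#if 0
  push_function_context ();        /* Only when entering a nested function.  */
  expand_function_start (fndecl);  /* Lay out parameters and the return slot.  */
  /* ... expand the function body to RTL, calling assign_stack_local for
     locals that need stack slots ...  */
  expand_function_end ();          /* Emit the code to return from the function.  */
  pop_function_context ();         /* Matches the push above.  */
#endif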
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "tm_p.h"
45 #include "stringpool.h"
46 #include "expmed.h"
47 #include "optabs.h"
48 #include "regs.h"
49 #include "emit-rtl.h"
50 #include "recog.h"
51 #include "rtl-error.h"
52 #include "alias.h"
53 #include "fold-const.h"
54 #include "stor-layout.h"
55 #include "varasm.h"
56 #include "except.h"
57 #include "dojump.h"
58 #include "explow.h"
59 #include "calls.h"
60 #include "expr.h"
61 #include "optabs-tree.h"
62 #include "output.h"
63 #include "langhooks.h"
64 #include "common/common-target.h"
65 #include "gimplify.h"
66 #include "tree-pass.h"
67 #include "cfgrtl.h"
68 #include "cfganal.h"
69 #include "cfgbuild.h"
70 #include "cfgcleanup.h"
71 #include "cfgexpand.h"
72 #include "shrink-wrap.h"
73 #include "toplev.h"
74 #include "rtl-iter.h"
75 #include "tree-chkp.h"
76 #include "rtl-chkp.h"
77 #include "tree-dfa.h"
78 #include "tree-ssa.h"
79 
80 /* So we can assign to cfun in this file.  */
81 #undef cfun
82 
83 #ifndef STACK_ALIGNMENT_NEEDED
84 #define STACK_ALIGNMENT_NEEDED 1
85 #endif
86 
87 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
88 
89 /* Round a value down to the largest multiple of the required alignment
90    that does not exceed it.  Avoid using division in case the value is
91    negative.  Assume the alignment is a power of two.  */
92 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
93 
94 /* Similar, but round to the next highest integer that meets the
95    alignment.  */
96 #define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
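/* Worked example (illustration only): with a power-of-two ALIGN these macros
   round toward negative and positive infinity respectively, even for
   negative values:
     FLOOR_ROUND (-13, 8) == -16
     CEIL_ROUND  ( 13, 8) ==  16
     CEIL_ROUND  (-13, 8) ==  -8  */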
97 
98 /* Nonzero once virtual register instantiation has been done.
99    assign_stack_local uses frame_pointer_rtx when this is nonzero.
100    calls.c:emit_library_call_value_1 uses it to set up
101    post-instantiation libcalls.  */
102 int virtuals_instantiated;
103 
104 /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
105 static GTY(()) int funcdef_no;
106 
107 /* These variables hold pointers to functions to create and destroy
108    target specific, per-function data structures.  */
109 struct machine_function * (*init_machine_status) (void);
110 
111 /* The currently compiled function.  */
112 struct function *cfun = 0;
113 
114 /* These hashes record the prologue and epilogue insns.  */
115 
116 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
117 {
118   static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
119   static bool equal (rtx a, rtx b) { return a == b; }
120 };
121 
122 static GTY((cache))
123   hash_table<insn_cache_hasher> *prologue_insn_hash;
124 static GTY((cache))
125   hash_table<insn_cache_hasher> *epilogue_insn_hash;
126 
127 
128 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
129 vec<tree, va_gc> *types_used_by_cur_var_decl;
130 
131 /* Forward declarations.  */
132 
133 static struct temp_slot *find_temp_slot_from_address (rtx);
134 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
135 static void pad_below (struct args_size *, machine_mode, tree);
136 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
137 static int all_blocks (tree, tree *);
138 static tree *get_block_vector (tree, int *);
139 extern tree debug_find_var_in_block_tree (tree, tree);
140 /* We always define `record_insns' even if it's not used so that we
141    can always export `prologue_epilogue_contains'.  */
142 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
143      ATTRIBUTE_UNUSED;
144 static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
145 static void prepare_function_start (void);
146 static void do_clobber_return_reg (rtx, void *);
147 static void do_use_return_reg (rtx, void *);
148 
149 
150 /* Stack of nested functions.  */
151 /* Keep track of the cfun stack.  */
152 
153 static vec<function *> function_context_stack;
154 
155 /* Save the current context for compilation of a nested function.
156    This is called from language-specific code.  */
157 
158 void
159 push_function_context (void)
160 {
161   if (cfun == 0)
162     allocate_struct_function (NULL, false);
163 
164   function_context_stack.safe_push (cfun);
165   set_cfun (NULL);
166 }
167 
168 /* Restore the last saved context, at the end of a nested function.
169    This function is called from language-specific code.  */
170 
171 void
172 pop_function_context (void)
173 {
174   struct function *p = function_context_stack.pop ();
175   set_cfun (p);
176   current_function_decl = p->decl;
177 
178   /* Reset variables that have known state during rtx generation.  */
179   virtuals_instantiated = 0;
180   generating_concat_p = 1;
181 }
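/* Illustration only, not compiled: how language-specific code is assumed to
   bracket compilation of a nested function with the two hooks above.  */
#if 0
  push_function_context ();   /* Save cfun and clear the current context.  */
  /* ... build a struct function for the nested function and generate
     its RTL ...  */
  pop_function_context ();    /* Restore cfun and current_function_decl.  */
#endif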
182 
183 /* Clear out all parts of the state in F that can safely be discarded
184    after the function has been parsed, but not compiled, to let
185    garbage collection reclaim the memory.  */
186 
187 void
188 free_after_parsing (struct function *f)
189 {
190   f->language = 0;
191 }
192 
193 /* Clear out all parts of the state in F that can safely be discarded
194    after the function has been compiled, to let garbage collection
195    reclaim the memory.  */
196 
197 void
198 free_after_compilation (struct function *f)
199 {
200   prologue_insn_hash = NULL;
201   epilogue_insn_hash = NULL;
202 
203   free (crtl->emit.regno_pointer_align);
204 
205   memset (crtl, 0, sizeof (struct rtl_data));
206   f->eh = NULL;
207   f->machine = NULL;
208   f->cfg = NULL;
209   f->curr_properties &= ~PROP_cfg;
210 
211   regno_reg_rtx = NULL;
212 }
213 
214 /* Return size needed for stack frame based on slots so far allocated.
215    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
216    the caller may have to do that.  */
217 
218 HOST_WIDE_INT
219 get_frame_size (void)
220 {
221   if (FRAME_GROWS_DOWNWARD)
222     return -frame_offset;
223   else
224     return frame_offset;
225 }
226 
227 /* Issue an error message and return TRUE if frame OFFSET overflows in
228    the signed target pointer arithmetic for function FUNC.  Otherwise
229    return FALSE.  */
230 
231 bool
232 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
233 {
234   unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
235 
236   if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
237 	       /* Leave room for the fixed part of the frame.  */
238 	       - 64 * UNITS_PER_WORD)
239     {
240       error_at (DECL_SOURCE_LOCATION (func),
241 		"total size of local objects too large");
242       return TRUE;
243     }
244 
245   return FALSE;
246 }
247 
248 /* Return stack slot alignment in bits for TYPE and MODE.  */
249 
250 static unsigned int
251 get_stack_local_alignment (tree type, machine_mode mode)
252 {
253   unsigned int alignment;
254 
255   if (mode == BLKmode)
256     alignment = BIGGEST_ALIGNMENT;
257   else
258     alignment = GET_MODE_ALIGNMENT (mode);
259 
260   /* Allow the front end to (possibly) increase the alignment of this
261      stack slot.  */
262   if (! type)
263     type = lang_hooks.types.type_for_mode (mode, 0);
264 
265   return STACK_SLOT_ALIGNMENT (type, mode, alignment);
266 }
267 
268 /* Determine whether it is possible to fit a stack slot of size SIZE and
269    alignment ALIGNMENT into an area in the stack frame that starts at
270    frame offset START and has a length of LENGTH.  If so, store the frame
271    offset to be used for the stack slot in *POFFSET and return true;
272    return false otherwise.  This function will extend the frame size when
273    given a start/length pair that lies at the end of the frame.  */
274 
275 static bool
276 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
277 		     HOST_WIDE_INT size, unsigned int alignment,
278 		     HOST_WIDE_INT *poffset)
279 {
280   HOST_WIDE_INT this_frame_offset;
281   int frame_off, frame_alignment, frame_phase;
282 
283   /* Calculate how many bytes the start of local variables is off from
284      stack alignment.  */
285   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
286   frame_off = STARTING_FRAME_OFFSET % frame_alignment;
287   frame_phase = frame_off ? frame_alignment - frame_off : 0;
288 
289   /* Round the frame offset to the specified alignment.  */
290 
291   /*  We must be careful here, since FRAME_OFFSET might be negative and
292       division with a negative dividend isn't as well defined as we might
293       like.  So we instead assume that ALIGNMENT is a power of two and
294       use logical operations which are unambiguous.  */
295   if (FRAME_GROWS_DOWNWARD)
296     this_frame_offset
297       = (FLOOR_ROUND (start + length - size - frame_phase,
298 		      (unsigned HOST_WIDE_INT) alignment)
299 	 + frame_phase);
300   else
301     this_frame_offset
302       = (CEIL_ROUND (start - frame_phase,
303 		     (unsigned HOST_WIDE_INT) alignment)
304 	 + frame_phase);
305 
306   /* See if it fits.  If this space is at the edge of the frame,
307      consider extending the frame to make it fit.  Our caller relies on
308      this when allocating a new slot.  */
309   if (frame_offset == start && this_frame_offset < frame_offset)
310     frame_offset = this_frame_offset;
311   else if (this_frame_offset < start)
312     return false;
313   else if (start + length == frame_offset
314 	   && this_frame_offset + size > start + length)
315     frame_offset = this_frame_offset + size;
316   else if (this_frame_offset + size > start + length)
317     return false;
318 
319   *poffset = this_frame_offset;
320   return true;
321 }
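/* Worked example (illustration only), assuming FRAME_GROWS_DOWNWARD and a
   STARTING_FRAME_OFFSET of 0 so that frame_phase is 0: for a free area
   covering offsets [-32, -16), the call
     try_fit_stack_local (-32, 16, 8, 8, &off)
   computes FLOOR_ROUND (-32 + 16 - 8, 8) == -24, which passes the fit
   checks above, so it stores -24 in *poffset and returns true; the bytes
   at offsets [-32, -24) remain free for a later slot.  */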
322 
323 /* Create a new frame_space structure describing free space in the stack
324    frame beginning at START and ending at END, and chain it into the
325    function's frame_space_list.  */
326 
327 static void
328 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
329 {
330   struct frame_space *space = ggc_alloc<frame_space> ();
331   space->next = crtl->frame_space_list;
332   crtl->frame_space_list = space;
333   space->start = start;
334   space->length = end - start;
335 }
336 
337 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
338    with machine mode MODE.
339 
340    ALIGN controls the amount of alignment for the address of the slot:
341    0 means according to MODE,
342    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
343    -2 means use BITS_PER_UNIT,
344    positive specifies alignment boundary in bits.
345 
346    KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
347    alignment and ASLK_RECORD_PAD bit set if we should remember
348    extra space we allocated for alignment purposes.  When we are
349    called from assign_stack_temp_for_type, it is not set so we don't
350    track the same stack slot in two independent lists.
351 
352    We do not round to stack_boundary here.  */
353 
354 rtx
355 assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
356 		      int align, int kind)
357 {
358   rtx x, addr;
359   int bigend_correction = 0;
360   HOST_WIDE_INT slot_offset = 0, old_frame_offset;
361   unsigned int alignment, alignment_in_bits;
362 
363   if (align == 0)
364     {
365       alignment = get_stack_local_alignment (NULL, mode);
366       alignment /= BITS_PER_UNIT;
367     }
368   else if (align == -1)
369     {
370       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
371       size = CEIL_ROUND (size, alignment);
372     }
373   else if (align == -2)
374     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
375   else
376     alignment = align / BITS_PER_UNIT;
377 
378   alignment_in_bits = alignment * BITS_PER_UNIT;
379 
380   /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
381   if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
382     {
383       alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
384       alignment = alignment_in_bits / BITS_PER_UNIT;
385     }
386 
387   if (SUPPORTS_STACK_ALIGNMENT)
388     {
389       if (crtl->stack_alignment_estimated < alignment_in_bits)
390 	{
391           if (!crtl->stack_realign_processed)
392 	    crtl->stack_alignment_estimated = alignment_in_bits;
393           else
394 	    {
395 	      /* If stack is realigned and stack alignment value
396 		 hasn't been finalized, it is OK not to increase
397 		 stack_alignment_estimated.  The bigger alignment
398 		 requirement is recorded in stack_alignment_needed
399 		 below.  */
400 	      gcc_assert (!crtl->stack_realign_finalized);
401 	      if (!crtl->stack_realign_needed)
402 		{
403 		  /* It is OK to reduce the alignment as long as the
404 		     requested size is 0 or the estimated stack
405 		     alignment >= mode alignment.  */
406 		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
407 		              || size == 0
408 			      || (crtl->stack_alignment_estimated
409 				  >= GET_MODE_ALIGNMENT (mode)));
410 		  alignment_in_bits = crtl->stack_alignment_estimated;
411 		  alignment = alignment_in_bits / BITS_PER_UNIT;
412 		}
413 	    }
414 	}
415     }
416 
417   if (crtl->stack_alignment_needed < alignment_in_bits)
418     crtl->stack_alignment_needed = alignment_in_bits;
419   if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
420     crtl->max_used_stack_slot_alignment = alignment_in_bits;
421 
422   if (mode != BLKmode || size != 0)
423     {
424       if (kind & ASLK_RECORD_PAD)
425 	{
426 	  struct frame_space **psp;
427 
428 	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
429 	    {
430 	      struct frame_space *space = *psp;
431 	      if (!try_fit_stack_local (space->start, space->length, size,
432 					alignment, &slot_offset))
433 		continue;
434 	      *psp = space->next;
435 	      if (slot_offset > space->start)
436 		add_frame_space (space->start, slot_offset);
437 	      if (slot_offset + size < space->start + space->length)
438 		add_frame_space (slot_offset + size,
439 				 space->start + space->length);
440 	      goto found_space;
441 	    }
442 	}
443     }
444   else if (!STACK_ALIGNMENT_NEEDED)
445     {
446       slot_offset = frame_offset;
447       goto found_space;
448     }
449 
450   old_frame_offset = frame_offset;
451 
452   if (FRAME_GROWS_DOWNWARD)
453     {
454       frame_offset -= size;
455       try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
456 
457       if (kind & ASLK_RECORD_PAD)
458 	{
459 	  if (slot_offset > frame_offset)
460 	    add_frame_space (frame_offset, slot_offset);
461 	  if (slot_offset + size < old_frame_offset)
462 	    add_frame_space (slot_offset + size, old_frame_offset);
463 	}
464     }
465   else
466     {
467       frame_offset += size;
468       try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
469 
470       if (kind & ASLK_RECORD_PAD)
471 	{
472 	  if (slot_offset > old_frame_offset)
473 	    add_frame_space (old_frame_offset, slot_offset);
474 	  if (slot_offset + size < frame_offset)
475 	    add_frame_space (slot_offset + size, frame_offset);
476 	}
477     }
478 
479  found_space:
480   /* On a big-endian machine, if we are allocating more space than we will use,
481      use the least significant bytes of those that are allocated.  */
482   if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
483     bigend_correction = size - GET_MODE_SIZE (mode);
484 
485   /* If we have already instantiated virtual registers, return the actual
486      address relative to the frame pointer.  */
487   if (virtuals_instantiated)
488     addr = plus_constant (Pmode, frame_pointer_rtx,
489 			  trunc_int_for_mode
490 			  (slot_offset + bigend_correction
491 			   + STARTING_FRAME_OFFSET, Pmode));
492   else
493     addr = plus_constant (Pmode, virtual_stack_vars_rtx,
494 			  trunc_int_for_mode
495 			  (slot_offset + bigend_correction,
496 			   Pmode));
497 
498   x = gen_rtx_MEM (mode, addr);
499   set_mem_align (x, alignment_in_bits);
500   MEM_NOTRAP_P (x) = 1;
501 
502   stack_slot_list
503     = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
504 
505   if (frame_offset_overflow (frame_offset, current_function_decl))
506     frame_offset = 0;
507 
508   return x;
509 }
510 
511 /* Wrapper around assign_stack_local_1 that passes ASLK_RECORD_PAD as KIND.  */
512 
513 rtx
514 assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
515 {
516   return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
517 }
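/* Illustration only, not compiled: the ALIGN conventions documented for
   assign_stack_local_1, exercised through the assign_stack_local wrapper.  */
#if 0
  rtx a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);  /* Align per MODE.  */
  rtx b = assign_stack_local (BLKmode, 64, -1);   /* BIGGEST_ALIGNMENT; size rounded.  */
  rtx c = assign_stack_local (QImode, 1, -2);     /* Byte alignment only.  */
  rtx d = assign_stack_local (BLKmode, 32, 128);  /* Explicit 128-bit alignment.  */
#endif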
518 
519 /* In order to evaluate some expressions, such as function calls returning
520    structures in memory, we need to temporarily allocate stack locations.
521    We record each allocated temporary in the following structure.
522 
523    Associated with each temporary slot is a nesting level.  When we pop up
524    one level, all temporaries associated with the previous level are freed.
525    Normally, all temporaries are freed after the execution of the statement
526    in which they were created.  However, if we are inside a ({...}) grouping,
527    the result may be in a temporary and hence must be preserved.  If the
528    result could be in a temporary, we preserve it if we can determine which
529    one it is in.  If we cannot determine which temporary may contain the
530    result, all temporaries are preserved.  A temporary is preserved by
531    pretending it was allocated at the previous nesting level.  */
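/* Illustration only, not compiled: the nesting discipline described above,
   using the public entry points defined later in this file.  */
#if 0
  push_temp_slots ();
  rtx t = assign_stack_temp (DImode, GET_MODE_SIZE (DImode));
  /* ... use T while expanding the statement ...  */
  preserve_temp_slots (t);   /* Keep T alive at the enclosing level, e.g. for
				a ({...}) value.  */
  pop_temp_slots ();         /* Frees the remaining slots of this level.  */
#endif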
532 
533 struct GTY(()) temp_slot {
534   /* Points to next temporary slot.  */
535   struct temp_slot *next;
536   /* Points to previous temporary slot.  */
537   struct temp_slot *prev;
538   /* The rtx used to reference the slot.  */
539   rtx slot;
540   /* The size, in units, of the slot.  */
541   HOST_WIDE_INT size;
542   /* The type of the object in the slot, or zero if it doesn't correspond
543      to a type.  We use this to determine whether a slot can be reused.
544      It can be reused if objects of the type of the new slot will always
545      conflict with objects of the type of the old slot.  */
546   tree type;
547   /* The alignment (in bits) of the slot.  */
548   unsigned int align;
549   /* Nonzero if this temporary is currently in use.  */
550   char in_use;
551   /* Nesting level at which this slot is being used.  */
552   int level;
553   /* The offset of the slot from the frame_pointer, including extra space
554      for alignment.  This info is for combine_temp_slots.  */
555   HOST_WIDE_INT base_offset;
556   /* The size of the slot, including extra space for alignment.  This
557      info is for combine_temp_slots.  */
558   HOST_WIDE_INT full_size;
559 };
560 
561 /* Entry for the below hash table.  */
562 struct GTY((for_user)) temp_slot_address_entry {
563   hashval_t hash;
564   rtx address;
565   struct temp_slot *temp_slot;
566 };
567 
568 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
569 {
570   static hashval_t hash (temp_slot_address_entry *);
571   static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
572 };
573 
574 /* A table of addresses that represent a stack slot.  The table is a mapping
575    from address RTXen to a temp slot.  */
576 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
577 static size_t n_temp_slots_in_use;
578 
579 /* Removes temporary slot TEMP from LIST.  */
580 
581 static void
582 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
583 {
584   if (temp->next)
585     temp->next->prev = temp->prev;
586   if (temp->prev)
587     temp->prev->next = temp->next;
588   else
589     *list = temp->next;
590 
591   temp->prev = temp->next = NULL;
592 }
593 
594 /* Inserts temporary slot TEMP into LIST.  */
595 
596 static void
597 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
598 {
599   temp->next = *list;
600   if (*list)
601     (*list)->prev = temp;
602   temp->prev = NULL;
603   *list = temp;
604 }
605 
606 /* Returns the list of used temp slots at LEVEL.  */
607 
608 static struct temp_slot **
609 temp_slots_at_level (int level)
610 {
611   if (level >= (int) vec_safe_length (used_temp_slots))
612     vec_safe_grow_cleared (used_temp_slots, level + 1);
613 
614   return &(*used_temp_slots)[level];
615 }
616 
617 /* Returns the maximal temporary slot level.  */
618 
619 static int
620 max_slot_level (void)
621 {
622   if (!used_temp_slots)
623     return -1;
624 
625   return used_temp_slots->length () - 1;
626 }
627 
628 /* Moves temporary slot TEMP to LEVEL.  */
629 
630 static void
631 move_slot_to_level (struct temp_slot *temp, int level)
632 {
633   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
634   insert_slot_to_list (temp, temp_slots_at_level (level));
635   temp->level = level;
636 }
637 
638 /* Make temporary slot TEMP available.  */
639 
640 static void
641 make_slot_available (struct temp_slot *temp)
642 {
643   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
644   insert_slot_to_list (temp, &avail_temp_slots);
645   temp->in_use = 0;
646   temp->level = -1;
647   n_temp_slots_in_use--;
648 }
649 
650 /* Compute the hash value for an address -> temp slot mapping.
651    The value is cached on the mapping entry.  */
652 static hashval_t
653 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
654 {
655   int do_not_record = 0;
656   return hash_rtx (t->address, GET_MODE (t->address),
657 		   &do_not_record, NULL, false);
658 }
659 
660 /* Return the hash value for an address -> temp slot mapping.  */
661 hashval_t
662 temp_address_hasher::hash (temp_slot_address_entry *t)
663 {
664   return t->hash;
665 }
666 
667 /* Compare two address -> temp slot mapping entries.  */
668 bool
669 temp_address_hasher::equal (temp_slot_address_entry *t1,
670 			    temp_slot_address_entry *t2)
671 {
672   return exp_equiv_p (t1->address, t2->address, 0, true);
673 }
674 
675 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
676 static void
677 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
678 {
679   struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
680   t->address = address;
681   t->temp_slot = temp_slot;
682   t->hash = temp_slot_address_compute_hash (t);
683   *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
684 }
685 
686 /* Remove an address -> temp slot mapping entry if the temp slot is
687    not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
688 int
689 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
690 {
691   const struct temp_slot_address_entry *t = *slot;
692   if (! t->temp_slot->in_use)
693     temp_slot_address_table->clear_slot (slot);
694   return 1;
695 }
696 
697 /* Remove all mappings of addresses to unused temp slots.  */
698 static void
699 remove_unused_temp_slot_addresses (void)
700 {
701   /* Use quicker clearing if there aren't any active temp slots.  */
702   if (n_temp_slots_in_use)
703     temp_slot_address_table->traverse
704       <void *, remove_unused_temp_slot_addresses_1> (NULL);
705   else
706     temp_slot_address_table->empty ();
707 }
708 
709 /* Find the temp slot corresponding to the object at address X.  */
710 
711 static struct temp_slot *
712 find_temp_slot_from_address (rtx x)
713 {
714   struct temp_slot *p;
715   struct temp_slot_address_entry tmp, *t;
716 
717   /* First try the easy way:
718      See if X exists in the address -> temp slot mapping.  */
719   tmp.address = x;
720   tmp.temp_slot = NULL;
721   tmp.hash = temp_slot_address_compute_hash (&tmp);
722   t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
723   if (t)
724     return t->temp_slot;
725 
726   /* If we have a sum involving a register, see if it points to a temp
727      slot.  */
728   if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
729       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
730     return p;
731   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
732 	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
733     return p;
734 
735   /* Last resort: Address is a virtual stack var address.  */
736   if (GET_CODE (x) == PLUS
737       && XEXP (x, 0) == virtual_stack_vars_rtx
738       && CONST_INT_P (XEXP (x, 1)))
739     {
740       int i;
741       for (i = max_slot_level (); i >= 0; i--)
742 	for (p = *temp_slots_at_level (i); p; p = p->next)
743 	  {
744 	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
745 		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
746 	      return p;
747 	  }
748     }
749 
750   return NULL;
751 }
752 
753 /* Allocate a temporary stack slot and record it for possible later
754    reuse.
755 
756    MODE is the machine mode to be given to the returned rtx.
757 
758    SIZE is the size in units of the space required.  We do no rounding here
759    since assign_stack_local will do any required rounding.
760 
761    TYPE is the type that will be used for the stack slot.  */
762 
763 rtx
764 assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
765 			    tree type)
766 {
767   unsigned int align;
768   struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
769   rtx slot;
770 
771   /* If SIZE is -1 it means that somebody tried to allocate a temporary
772      of a variable size.  */
773   gcc_assert (size != -1);
774 
775   align = get_stack_local_alignment (type, mode);
776 
777   /* Try to find an available, already-allocated temporary of the proper
778      mode which meets the size and alignment requirements.  Choose the
779      smallest one with the closest alignment.
780 
781      If assign_stack_temp is called outside of the tree->rtl expansion,
782      we cannot reuse the stack slots (that may still refer to
783      VIRTUAL_STACK_VARS_REGNUM).  */
784   if (!virtuals_instantiated)
785     {
786       for (p = avail_temp_slots; p; p = p->next)
787 	{
788 	  if (p->align >= align && p->size >= size
789 	      && GET_MODE (p->slot) == mode
790 	      && objects_must_conflict_p (p->type, type)
791 	      && (best_p == 0 || best_p->size > p->size
792 		  || (best_p->size == p->size && best_p->align > p->align)))
793 	    {
794 	      if (p->align == align && p->size == size)
795 		{
796 		  selected = p;
797 		  cut_slot_from_list (selected, &avail_temp_slots);
798 		  best_p = 0;
799 		  break;
800 		}
801 	      best_p = p;
802 	    }
803 	}
804     }
805 
806   /* Make our best, if any, the one to use.  */
807   if (best_p)
808     {
809       selected = best_p;
810       cut_slot_from_list (selected, &avail_temp_slots);
811 
812       /* If there are enough aligned bytes left over, make them into a new
813 	 temp_slot so that the extra bytes don't get wasted.  Do this only
814 	 for BLKmode slots, so that we can be sure of the alignment.  */
815       if (GET_MODE (best_p->slot) == BLKmode)
816 	{
817 	  int alignment = best_p->align / BITS_PER_UNIT;
818 	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
819 
820 	  if (best_p->size - rounded_size >= alignment)
821 	    {
822 	      p = ggc_alloc<temp_slot> ();
823 	      p->in_use = 0;
824 	      p->size = best_p->size - rounded_size;
825 	      p->base_offset = best_p->base_offset + rounded_size;
826 	      p->full_size = best_p->full_size - rounded_size;
827 	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
828 	      p->align = best_p->align;
829 	      p->type = best_p->type;
830 	      insert_slot_to_list (p, &avail_temp_slots);
831 
832 	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
833 						   stack_slot_list);
834 
835 	      best_p->size = rounded_size;
836 	      best_p->full_size = rounded_size;
837 	    }
838 	}
839     }
840 
841   /* If we still didn't find one, make a new temporary.  */
842   if (selected == 0)
843     {
844       HOST_WIDE_INT frame_offset_old = frame_offset;
845 
846       p = ggc_alloc<temp_slot> ();
847 
848       /* We are passing an explicit alignment request to assign_stack_local.
849 	 One side effect of that is assign_stack_local will not round SIZE
850 	 to ensure the frame offset remains suitably aligned.
851 
852 	 So for requests which depended on the rounding of SIZE, we go ahead
853 	 and round it now.  We also make sure ALIGNMENT is at least
854 	 BIGGEST_ALIGNMENT.  */
855       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
856       p->slot = assign_stack_local_1 (mode,
857 				      (mode == BLKmode
858 				       ? CEIL_ROUND (size,
859 						     (int) align
860 						     / BITS_PER_UNIT)
861 				       : size),
862 				      align, 0);
863 
864       p->align = align;
865 
866       /* The following slot size computation is necessary because we don't
867 	 know the actual size of the temporary slot until assign_stack_local
868 	 has performed all the frame alignment and size rounding for the
869 	 requested temporary.  Note that extra space added for alignment
870 	 can be either above or below this stack slot depending on which
871 	 way the frame grows.  We include the extra space if and only if it
872 	 is above this slot.  */
873       if (FRAME_GROWS_DOWNWARD)
874 	p->size = frame_offset_old - frame_offset;
875       else
876 	p->size = size;
877 
878       /* Now define the fields used by combine_temp_slots.  */
879       if (FRAME_GROWS_DOWNWARD)
880 	{
881 	  p->base_offset = frame_offset;
882 	  p->full_size = frame_offset_old - frame_offset;
883 	}
884       else
885 	{
886 	  p->base_offset = frame_offset_old;
887 	  p->full_size = frame_offset - frame_offset_old;
888 	}
889 
890       selected = p;
891     }
892 
893   p = selected;
894   p->in_use = 1;
895   p->type = type;
896   p->level = temp_slot_level;
897   n_temp_slots_in_use++;
898 
899   pp = temp_slots_at_level (p->level);
900   insert_slot_to_list (p, pp);
901   insert_temp_slot_address (XEXP (p->slot, 0), p);
902 
903   /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
904   slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
905   stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
906 
907   /* If we know the alias set for the memory that will be used, use
908      it.  If there's no TYPE, then we don't know anything about the
909      alias set for the memory.  */
910   set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
911   set_mem_align (slot, align);
912 
913   /* If a type is specified, set the relevant flags.  */
914   if (type != 0)
915     MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
916   MEM_NOTRAP_P (slot) = 1;
917 
918   return slot;
919 }
920 
921 /* Allocate a temporary stack slot and record it for possible later
922    reuse.  The first two arguments are the same as in the preceding function.  */
923 
924 rtx
925 assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
926 {
927   return assign_stack_temp_for_type (mode, size, NULL_TREE);
928 }
929 
930 /* Assign a temporary.
931    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
932    and its name should be used in error messages.  In either case, we
933    allocate an object of the given type.
934    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
935    it is 0 if a register is OK.
936    DONT_PROMOTE is 1 if we should not promote values in register
937    to wider modes.  */
938 
939 rtx
940 assign_temp (tree type_or_decl, int memory_required,
941 	     int dont_promote ATTRIBUTE_UNUSED)
942 {
943   tree type, decl;
944   machine_mode mode;
945 #ifdef PROMOTE_MODE
946   int unsignedp;
947 #endif
948 
949   if (DECL_P (type_or_decl))
950     decl = type_or_decl, type = TREE_TYPE (decl);
951   else
952     decl = NULL, type = type_or_decl;
953 
954   mode = TYPE_MODE (type);
955 #ifdef PROMOTE_MODE
956   unsignedp = TYPE_UNSIGNED (type);
957 #endif
958 
959   /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
960      end.  See also create_tmp_var for the gimplification-time check.  */
961   gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
962 
963   if (mode == BLKmode || memory_required)
964     {
965       HOST_WIDE_INT size = int_size_in_bytes (type);
966       rtx tmp;
967 
968       /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
969 	 problems with allocating the stack space.  */
970       if (size == 0)
971 	size = 1;
972 
973       /* Unfortunately, we don't yet know how to allocate variable-sized
974 	 temporaries.  However, sometimes we can find a fixed upper limit on
975 	 the size, so try that instead.  */
976       else if (size == -1)
977 	size = max_int_size_in_bytes (type);
978 
979       /* The size of the temporary may be too large to fit into an integer.  */
980       /* ??? Not sure this should happen except for user silliness, so limit
981 	 this to things that aren't compiler-generated temporaries.  The
982 	 rest of the time we'll die in assign_stack_temp_for_type.  */
983       if (decl && size == -1
984 	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
985 	{
986 	  error ("size of variable %q+D is too large", decl);
987 	  size = 1;
988 	}
989 
990       tmp = assign_stack_temp_for_type (mode, size, type);
991       return tmp;
992     }
993 
994 #ifdef PROMOTE_MODE
995   if (! dont_promote)
996     mode = promote_mode (type, mode, &unsignedp);
997 #endif
998 
999   return gen_reg_rtx (mode);
1000 }
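/* Illustration only, not compiled: the two behaviors of assign_temp.  TYPE
   stands for a hypothetical complete, non-addressable type.  */
#if 0
  rtx in_mem = assign_temp (type, /*memory_required=*/1, 0);  /* Stack slot.  */
  rtx in_reg = assign_temp (type, /*memory_required=*/0, 0);  /* Pseudo, unless BLKmode.  */
#endif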
1001 
1002 /* Combine temporary stack slots which are adjacent on the stack.
1003 
1004    This allows for better use of already allocated stack space.  This is only
1005    done for BLKmode slots because we can be sure that we won't have alignment
1006    problems in this case.  */
1007 
1008 static void
1009 combine_temp_slots (void)
1010 {
1011   struct temp_slot *p, *q, *next, *next_q;
1012   int num_slots;
1013 
1014   /* We can't combine slots, because the information about which slot
1015      is in which alias set will be lost.  */
1016   if (flag_strict_aliasing)
1017     return;
1018 
1019   /* If there are a lot of temp slots, don't do anything unless we are
1020      optimizing heavily.  */
1021   if (! flag_expensive_optimizations)
1022     for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1023       if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1024 	return;
1025 
1026   for (p = avail_temp_slots; p; p = next)
1027     {
1028       int delete_p = 0;
1029 
1030       next = p->next;
1031 
1032       if (GET_MODE (p->slot) != BLKmode)
1033 	continue;
1034 
1035       for (q = p->next; q; q = next_q)
1036 	{
1037        	  int delete_q = 0;
1038 
1039 	  next_q = q->next;
1040 
1041 	  if (GET_MODE (q->slot) != BLKmode)
1042 	    continue;
1043 
1044 	  if (p->base_offset + p->full_size == q->base_offset)
1045 	    {
1046 	      /* Q comes after P; combine Q into P.  */
1047 	      p->size += q->size;
1048 	      p->full_size += q->full_size;
1049 	      delete_q = 1;
1050 	    }
1051 	  else if (q->base_offset + q->full_size == p->base_offset)
1052 	    {
1053 	      /* P comes after Q; combine P into Q.  */
1054 	      q->size += p->size;
1055 	      q->full_size += p->full_size;
1056 	      delete_p = 1;
1057 	      break;
1058 	    }
1059 	  if (delete_q)
1060 	    cut_slot_from_list (q, &avail_temp_slots);
1061 	}
1062 
1063       /* Either delete P or advance past it.  */
1064       if (delete_p)
1065 	cut_slot_from_list (p, &avail_temp_slots);
1066     }
1067 }
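/* Worked example (illustration only): two available BLKmode slots whose
   base_offset/full_size pairs cover [16, 48) and [48, 80) satisfy
   p->base_offset + p->full_size == q->base_offset, so the loop above merges
   Q into P, leaving a single available slot covering [16, 80).  */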
1068 
1069 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1070    slot that previously was known by OLD_RTX.  */
1071 
1072 void
1073 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1074 {
1075   struct temp_slot *p;
1076 
1077   if (rtx_equal_p (old_rtx, new_rtx))
1078     return;
1079 
1080   p = find_temp_slot_from_address (old_rtx);
1081 
1082   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1083      NEW_RTX is a register, see if one operand of the PLUS is a
1084      temporary location; if so, NEW_RTX points into it.  Otherwise, if
1085      both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1086      in common between them; if so, try a recursive call on those
1087      values.  */
1088   if (p == 0)
1089     {
1090       if (GET_CODE (old_rtx) != PLUS)
1091 	return;
1092 
1093       if (REG_P (new_rtx))
1094 	{
1095 	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1096 	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1097 	  return;
1098 	}
1099       else if (GET_CODE (new_rtx) != PLUS)
1100 	return;
1101 
1102       if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1103 	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1104       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1105 	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1106       else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1107 	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1108       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1109 	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1110 
1111       return;
1112     }
1113 
1114   /* Otherwise add an alias for the temp's address.  */
1115   insert_temp_slot_address (new_rtx, p);
1116 }
1117 
1118 /* If X could be a reference to a temporary slot, mark that slot as
1119    belonging to the level one higher than the current level.  If X
1120    matched one of our slots, just mark that one.  Otherwise, we can't
1121    easily predict which it is, so upgrade all of them.
1122 
1123    This is called when an ({...}) construct occurs and a statement
1124    returns a value in memory.  */
1125 
1126 void
1127 preserve_temp_slots (rtx x)
1128 {
1129   struct temp_slot *p = 0, *next;
1130 
1131   if (x == 0)
1132     return;
1133 
1134   /* If X is a register that is being used as a pointer, see if we have
1135      a temporary slot we know it points to.  */
1136   if (REG_P (x) && REG_POINTER (x))
1137     p = find_temp_slot_from_address (x);
1138 
1139   /* If X is not in memory or is at a constant address, it cannot be in
1140      a temporary slot.  */
1141   if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1142     return;
1143 
1144   /* First see if we can find a match.  */
1145   if (p == 0)
1146     p = find_temp_slot_from_address (XEXP (x, 0));
1147 
1148   if (p != 0)
1149     {
1150       if (p->level == temp_slot_level)
1151 	move_slot_to_level (p, temp_slot_level - 1);
1152       return;
1153     }
1154 
1155   /* Otherwise, preserve all non-kept slots at this level.  */
1156   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1157     {
1158       next = p->next;
1159       move_slot_to_level (p, temp_slot_level - 1);
1160     }
1161 }
1162 
1163 /* Free all temporaries used so far.  This is normally called at the
1164    end of generating code for a statement.  */
1165 
1166 void
1167 free_temp_slots (void)
1168 {
1169   struct temp_slot *p, *next;
1170   bool some_available = false;
1171 
1172   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1173     {
1174       next = p->next;
1175       make_slot_available (p);
1176       some_available = true;
1177     }
1178 
1179   if (some_available)
1180     {
1181       remove_unused_temp_slot_addresses ();
1182       combine_temp_slots ();
1183     }
1184 }
1185 
1186 /* Push deeper into the nesting level for stack temporaries.  */
1187 
1188 void
1189 push_temp_slots (void)
1190 {
1191   temp_slot_level++;
1192 }
1193 
1194 /* Pop a temporary nesting level.  All slots in use in the current level
1195    are freed.  */
1196 
1197 void
1198 pop_temp_slots (void)
1199 {
1200   free_temp_slots ();
1201   temp_slot_level--;
1202 }
1203 
1204 /* Initialize temporary slots.  */
1205 
1206 void
1207 init_temp_slots (void)
1208 {
1209   /* We have not allocated any temporaries yet.  */
1210   avail_temp_slots = 0;
1211   vec_alloc (used_temp_slots, 0);
1212   temp_slot_level = 0;
1213   n_temp_slots_in_use = 0;
1214 
1215   /* Set up the table to map addresses to temp slots.  */
1216   if (! temp_slot_address_table)
1217     temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1218   else
1219     temp_slot_address_table->empty ();
1220 }
1221 
1222 /* Functions and data structures to keep track of the values hard regs
1223    had at the start of the function.  */
1224 
1225 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1226    and has_hard_reg_initial_val.  */
1227 struct GTY(()) initial_value_pair {
1228   rtx hard_reg;
1229   rtx pseudo;
1230 };
1231 /* ???  This could be a VEC but there is currently no way to define an
1232    opaque VEC type.  This could be worked around by defining struct
1233    initial_value_pair in function.h.  */
1234 struct GTY(()) initial_value_struct {
1235   int num_entries;
1236   int max_entries;
1237   initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1238 };
1239 
1240 /* If a pseudo represents an initial hard reg (or expression), return
1241    it, else return NULL_RTX.  */
1242 
1243 rtx
1244 get_hard_reg_initial_reg (rtx reg)
1245 {
1246   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1247   int i;
1248 
1249   if (ivs == 0)
1250     return NULL_RTX;
1251 
1252   for (i = 0; i < ivs->num_entries; i++)
1253     if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1254       return ivs->entries[i].hard_reg;
1255 
1256   return NULL_RTX;
1257 }
1258 
1259 /* Make sure that there's a pseudo register of mode MODE that stores the
1260    initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
1261 
1262 rtx
1263 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1264 {
1265   struct initial_value_struct *ivs;
1266   rtx rv;
1267 
1268   rv = has_hard_reg_initial_val (mode, regno);
1269   if (rv)
1270     return rv;
1271 
1272   ivs = crtl->hard_reg_initial_vals;
1273   if (ivs == 0)
1274     {
1275       ivs = ggc_alloc<initial_value_struct> ();
1276       ivs->num_entries = 0;
1277       ivs->max_entries = 5;
1278       ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1279       crtl->hard_reg_initial_vals = ivs;
1280     }
1281 
1282   if (ivs->num_entries >= ivs->max_entries)
1283     {
1284       ivs->max_entries += 5;
1285       ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1286 				    ivs->max_entries);
1287     }
1288 
1289   ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1290   ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1291 
1292   return ivs->entries[ivs->num_entries++].pseudo;
1293 }
1294 
1295 /* See if get_hard_reg_initial_val has been used to create a pseudo
1296    for the initial value of hard register REGNO in mode MODE.  Return
1297    the associated pseudo if so, otherwise return NULL.  */
1298 
1299 rtx
1300 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1301 {
1302   struct initial_value_struct *ivs;
1303   int i;
1304 
1305   ivs = crtl->hard_reg_initial_vals;
1306   if (ivs != 0)
1307     for (i = 0; i < ivs->num_entries; i++)
1308       if (GET_MODE (ivs->entries[i].hard_reg) == mode
1309 	  && REGNO (ivs->entries[i].hard_reg) == regno)
1310 	return ivs->entries[i].pseudo;
1311 
1312   return NULL_RTX;
1313 }
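/* Illustration only, not compiled: how a backend is assumed to capture the
   value a hard register had on entry to the function.  REGNO_AT_ENTRY is a
   hypothetical hard register number.  */
#if 0
  rtx entry_val = get_hard_reg_initial_val (Pmode, REGNO_AT_ENTRY);
  /* ENTRY_VAL is a pseudo; emit_initial_value_sets later emits the copy
     from the hard register into it at the function entry.  */
#endif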
1314 
1315 unsigned int
1316 emit_initial_value_sets (void)
1317 {
1318   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1319   int i;
1320   rtx_insn *seq;
1321 
1322   if (ivs == 0)
1323     return 0;
1324 
1325   start_sequence ();
1326   for (i = 0; i < ivs->num_entries; i++)
1327     emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1328   seq = get_insns ();
1329   end_sequence ();
1330 
1331   emit_insn_at_entry (seq);
1332   return 0;
1333 }
1334 
1335 /* If I is a valid initial-value entry, store its hard register in *HREG
1336    and its pseudo register in *PREG and return TRUE; otherwise return FALSE.  */
1337 bool
1338 initial_value_entry (int i, rtx *hreg, rtx *preg)
1339 {
1340   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1341   if (!ivs || i >= ivs->num_entries)
1342     return false;
1343 
1344   *hreg = ivs->entries[i].hard_reg;
1345   *preg = ivs->entries[i].pseudo;
1346   return true;
1347 }
1348 
1349 /* These routines are responsible for converting virtual register references
1350    to the actual hard register references once RTL generation is complete.
1351 
1352    The following five variables are used for communication between the
1353    routines.  They contain the offsets of the virtual registers from their
1354    respective hard registers.  */
1355 
1356 static int in_arg_offset;
1357 static int var_offset;
1358 static int dynamic_offset;
1359 static int out_arg_offset;
1360 static int cfa_offset;
1361 
1362 /* On most machines, the stack pointer register is equivalent to the bottom
1363    of the stack.  */
1364 
1365 #ifndef STACK_POINTER_OFFSET
1366 #define STACK_POINTER_OFFSET	0
1367 #endif
1368 
1369 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1370 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1371 #endif
1372 
1373 /* If not defined, pick an appropriate default for the offset of dynamically
1374    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1375    INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
1376 
1377 #ifndef STACK_DYNAMIC_OFFSET
1378 
1379 /* The bottom of the stack points to the actual arguments.  If
1380    REG_PARM_STACK_SPACE is defined, this includes the space for the register
1381    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1382    stack space for register parameters is not pushed by the caller, but
1383    rather is part of the fixed stack areas and hence not included in
1384    `crtl->outgoing_args_size'.  Nevertheless, we must allow
1385    for it when allocating stack dynamic objects.  */
1386 
1387 #ifdef INCOMING_REG_PARM_STACK_SPACE
1388 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1389 ((ACCUMULATE_OUTGOING_ARGS						      \
1390   ? (crtl->outgoing_args_size				      \
1391      + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1392 					       : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1393   : 0) + (STACK_POINTER_OFFSET))
1394 #else
1395 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1396 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)	      \
1397  + (STACK_POINTER_OFFSET))
1398 #endif
1399 #endif
1400 
1401 
1402 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1403    is a virtual register, return the equivalent hard register and set the
1404    offset indirectly through the pointer.  Otherwise, return 0.  */
1405 
1406 static rtx
1407 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1408 {
1409   rtx new_rtx;
1410   HOST_WIDE_INT offset;
1411 
1412   if (x == virtual_incoming_args_rtx)
1413     {
1414       if (stack_realign_drap)
1415         {
1416 	  /* Replace virtual_incoming_args_rtx with internal arg
1417 	     pointer if DRAP is used to realign stack.  */
1418           new_rtx = crtl->args.internal_arg_pointer;
1419           offset = 0;
1420         }
1421       else
1422         new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1423     }
1424   else if (x == virtual_stack_vars_rtx)
1425     new_rtx = frame_pointer_rtx, offset = var_offset;
1426   else if (x == virtual_stack_dynamic_rtx)
1427     new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1428   else if (x == virtual_outgoing_args_rtx)
1429     new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1430   else if (x == virtual_cfa_rtx)
1431     {
1432 #ifdef FRAME_POINTER_CFA_OFFSET
1433       new_rtx = frame_pointer_rtx;
1434 #else
1435       new_rtx = arg_pointer_rtx;
1436 #endif
1437       offset = cfa_offset;
1438     }
1439   else if (x == virtual_preferred_stack_boundary_rtx)
1440     {
1441       new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1442       offset = 0;
1443     }
1444   else
1445     return NULL_RTX;
1446 
1447   *poffset = offset;
1448   return new_rtx;
1449 }
1450 
1451 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1452    registers present inside of *LOC.  The expression is simplified,
1453    as much as possible, but is not to be considered "valid" in any sense
1454    implied by the target.  Return true if any change is made.  */
1455 
1456 static bool
1457 instantiate_virtual_regs_in_rtx (rtx *loc)
1458 {
1459   if (!*loc)
1460     return false;
1461   bool changed = false;
1462   subrtx_ptr_iterator::array_type array;
1463   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1464     {
1465       rtx *loc = *iter;
1466       if (rtx x = *loc)
1467 	{
1468 	  rtx new_rtx;
1469 	  HOST_WIDE_INT offset;
1470 	  switch (GET_CODE (x))
1471 	    {
1472 	    case REG:
1473 	      new_rtx = instantiate_new_reg (x, &offset);
1474 	      if (new_rtx)
1475 		{
1476 		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1477 		  changed = true;
1478 		}
1479 	      iter.skip_subrtxes ();
1480 	      break;
1481 
1482 	    case PLUS:
1483 	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1484 	      if (new_rtx)
1485 		{
1486 		  XEXP (x, 0) = new_rtx;
1487 		  *loc = plus_constant (GET_MODE (x), x, offset, true);
1488 		  changed = true;
1489 		  iter.skip_subrtxes ();
1490 		  break;
1491 		}
1492 
1493 	      /* FIXME -- from old code */
1494 	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1495 		 we can commute the PLUS and SUBREG because pointers into the
1496 		 frame are well-behaved.  */
1497 	      break;
1498 
1499 	    default:
1500 	      break;
1501 	    }
1502 	}
1503     }
1504   return changed;
1505 }
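/* Worked example (illustration only): assuming virtual_stack_vars_rtx maps
   to frame_pointer_rtx plus var_offset (see instantiate_new_reg above), an
   address such as
     (plus (reg virtual-stack-vars) (const_int 8))
   is rewritten in place to
     (plus (reg frame-pointer) (const_int var_offset + 8)).  */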
1506 
1507 /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
1508    matches the predicate for insn CODE operand OPERAND.  */
1509 
1510 static int
1511 safe_insn_predicate (int code, int operand, rtx x)
1512 {
1513   return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1514 }
1515 
1516 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1517    registers present inside of insn.  The result will be a valid insn.  */
1518 
1519 static void
1520 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1521 {
1522   HOST_WIDE_INT offset;
1523   int insn_code, i;
1524   bool any_change = false;
1525   rtx set, new_rtx, x;
1526   rtx_insn *seq;
1527 
1528   /* There are some special cases to be handled first.  */
1529   set = single_set (insn);
1530   if (set)
1531     {
1532       /* We're allowed to assign to a virtual register.  This is interpreted
1533 	 to mean that the underlying register gets assigned the inverse
1534 	 transformation.  This is used, for example, in the handling of
1535 	 non-local gotos.  */
1536       new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1537       if (new_rtx)
1538 	{
1539 	  start_sequence ();
1540 
1541 	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1542 	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1543 				   gen_int_mode (-offset, GET_MODE (new_rtx)));
1544 	  x = force_operand (x, new_rtx);
1545 	  if (x != new_rtx)
1546 	    emit_move_insn (new_rtx, x);
1547 
1548 	  seq = get_insns ();
1549 	  end_sequence ();
1550 
1551 	  emit_insn_before (seq, insn);
1552 	  delete_insn (insn);
1553 	  return;
1554 	}
1555 
1556       /* Handle a straight copy from a virtual register by generating a
1557 	 new add insn.  The difference between this and falling through
1558 	 to the generic case is avoiding a new pseudo and eliminating a
1559 	 move insn in the initial rtl stream.  */
1560       new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1561       if (new_rtx && offset != 0
1562 	  && REG_P (SET_DEST (set))
1563 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1564 	{
1565 	  start_sequence ();
1566 
1567 	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1568 				   gen_int_mode (offset,
1569 						 GET_MODE (SET_DEST (set))),
1570 				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1571 	  if (x != SET_DEST (set))
1572 	    emit_move_insn (SET_DEST (set), x);
1573 
1574 	  seq = get_insns ();
1575 	  end_sequence ();
1576 
1577 	  emit_insn_before (seq, insn);
1578 	  delete_insn (insn);
1579 	  return;
1580 	}
1581 
1582       extract_insn (insn);
1583       insn_code = INSN_CODE (insn);
1584 
1585       /* Handle a plus involving a virtual register by determining if the
1586 	 operands remain valid if they're modified in place.  */
1587       if (GET_CODE (SET_SRC (set)) == PLUS
1588 	  && recog_data.n_operands >= 3
1589 	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1590 	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1591 	  && CONST_INT_P (recog_data.operand[2])
1592 	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1593 	{
1594 	  offset += INTVAL (recog_data.operand[2]);
1595 
1596 	  /* If the sum is zero, then replace with a plain move.  */
1597 	  if (offset == 0
1598 	      && REG_P (SET_DEST (set))
1599 	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1600 	    {
1601 	      start_sequence ();
1602 	      emit_move_insn (SET_DEST (set), new_rtx);
1603 	      seq = get_insns ();
1604 	      end_sequence ();
1605 
1606 	      emit_insn_before (seq, insn);
1607 	      delete_insn (insn);
1608 	      return;
1609 	    }
1610 
1611 	  x = gen_int_mode (offset, recog_data.operand_mode[2]);
1612 
1613 	  /* Using validate_change and apply_change_group here leaves
1614 	     recog_data in an invalid state.  Since we know exactly what
1615 	     we want to check, do those two by hand.  */
1616 	  if (safe_insn_predicate (insn_code, 1, new_rtx)
1617 	      && safe_insn_predicate (insn_code, 2, x))
1618 	    {
1619 	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1620 	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1621 	      any_change = true;
1622 
1623 	      /* Fall through into the regular operand fixup loop in
1624 		 order to take care of operands other than 1 and 2.  */
1625 	    }
1626 	}
1627     }
1628   else
1629     {
1630       extract_insn (insn);
1631       insn_code = INSN_CODE (insn);
1632     }
1633 
1634   /* In the general case, we expect virtual registers to appear only in
1635      operands, and then only as either bare registers or inside memories.  */
1636   for (i = 0; i < recog_data.n_operands; ++i)
1637     {
1638       x = recog_data.operand[i];
1639       switch (GET_CODE (x))
1640 	{
1641 	case MEM:
1642 	  {
1643 	    rtx addr = XEXP (x, 0);
1644 
1645 	    if (!instantiate_virtual_regs_in_rtx (&addr))
1646 	      continue;
1647 
1648 	    start_sequence ();
1649 	    x = replace_equiv_address (x, addr, true);
1650 	    /* It may happen that the address was valid while it used the
1651 	       virtual reg (e.g. based on the virtual stack reg, which the
1652 	       predicates might accept with any offset), whereas it no
1653 	       longer is once rewritten, for instance when the address
1654 	       still has an offset but the base reg is no longer the
1655 	       virtual stack reg.  Below we would do a force_reg on the
1656 	       whole operand, but this insn might actually only accept
1657 	       memory.  Hence, before that last resort, try to reload the
1658 	       address into a register, so this operand stays a MEM.  */
1659 	    if (!safe_insn_predicate (insn_code, i, x))
1660 	      {
1661 		addr = force_reg (GET_MODE (addr), addr);
1662 		x = replace_equiv_address (x, addr, true);
1663 	      }
1664 	    seq = get_insns ();
1665 	    end_sequence ();
1666 	    if (seq)
1667 	      emit_insn_before (seq, insn);
1668 	  }
1669 	  break;
1670 
1671 	case REG:
1672 	  new_rtx = instantiate_new_reg (x, &offset);
1673 	  if (new_rtx == NULL)
1674 	    continue;
1675 	  if (offset == 0)
1676 	    x = new_rtx;
1677 	  else
1678 	    {
1679 	      start_sequence ();
1680 
1681 	      /* Careful, special mode predicates may have stuff in
1682 		 insn_data[insn_code].operand[i].mode that isn't useful
1683 		 to us for computing a new value.  */
1684 	      /* ??? Recognize address_operand and/or "p" constraints
1685 		 to see if (plus new offset) is a valid address before we
1686 		 put this through expand_simple_binop.  */
1687 	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1688 				       gen_int_mode (offset, GET_MODE (x)),
1689 				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
1690 	      seq = get_insns ();
1691 	      end_sequence ();
1692 	      emit_insn_before (seq, insn);
1693 	    }
1694 	  break;
1695 
1696 	case SUBREG:
1697 	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1698 	  if (new_rtx == NULL)
1699 	    continue;
1700 	  if (offset != 0)
1701 	    {
1702 	      start_sequence ();
1703 	      new_rtx = expand_simple_binop
1704 		(GET_MODE (new_rtx), PLUS, new_rtx,
1705 		 gen_int_mode (offset, GET_MODE (new_rtx)),
1706 		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1707 	      seq = get_insns ();
1708 	      end_sequence ();
1709 	      emit_insn_before (seq, insn);
1710 	    }
1711 	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1712 				   GET_MODE (new_rtx), SUBREG_BYTE (x));
1713 	  gcc_assert (x);
1714 	  break;
1715 
1716 	default:
1717 	  continue;
1718 	}
1719 
1720       /* At this point, X contains the new value for the operand.
1721 	 Validate the new value vs the insn predicate.  Note that
1722 	 asm insns will have insn_code -1 here.  */
1723       if (!safe_insn_predicate (insn_code, i, x))
1724 	{
1725 	  start_sequence ();
1726 	  if (REG_P (x))
1727 	    {
1728 	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1729 	      x = copy_to_reg (x);
1730 	    }
1731 	  else
1732 	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
1733 	  seq = get_insns ();
1734 	  end_sequence ();
1735 	  if (seq)
1736 	    emit_insn_before (seq, insn);
1737 	}
1738 
1739       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1740       any_change = true;
1741     }
1742 
1743   if (any_change)
1744     {
1745       /* Propagate operand changes into the duplicates.  */
1746       for (i = 0; i < recog_data.n_dups; ++i)
1747 	*recog_data.dup_loc[i]
1748 	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1749 
1750       /* Force re-recognition of the instruction for validation.  */
1751       INSN_CODE (insn) = -1;
1752     }
1753 
1754   if (asm_noperands (PATTERN (insn)) >= 0)
1755     {
1756       if (!check_asm_operands (PATTERN (insn)))
1757 	{
1758 	  error_for_asm (insn, "impossible constraint in %<asm%>");
1759 	  /* For asm goto, instead of fixing up all the edges
1760 	     just clear the template and clear input operands
1761 	     (asm goto doesn't have any output operands).  */
1762 	  if (JUMP_P (insn))
1763 	    {
1764 	      rtx asm_op = extract_asm_operands (PATTERN (insn));
1765 	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1766 	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1767 	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1768 	    }
1769 	  else
1770 	    delete_insn (insn);
1771 	}
1772     }
1773   else
1774     {
1775       if (recog_memoized (insn) < 0)
1776 	fatal_insn_not_found (insn);
1777     }
1778 }
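/* Illustrative sketch (not part of the original source): the "straight
   copy" case above rewrites a copy out of a virtual register into an
   add.  Roughly,

     (set (reg:SI 100) (reg:SI virtual-stack-vars))

   becomes

     (set (reg:SI 100) (plus:SI (reg:SI frame-pointer) (const_int N)))

   where N is the var_offset computed in instantiate_virtual_regs below;
   the actual hard register and offset are target dependent.  */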
1779 
1780 /* Subroutine of instantiate_decls.  Given RTL representing a decl,
1781    do any instantiation required.  */
1782 
1783 void
1784 instantiate_decl_rtl (rtx x)
1785 {
1786   rtx addr;
1787 
1788   if (x == 0)
1789     return;
1790 
1791   /* If this is a CONCAT, recurse for the pieces.  */
1792   if (GET_CODE (x) == CONCAT)
1793     {
1794       instantiate_decl_rtl (XEXP (x, 0));
1795       instantiate_decl_rtl (XEXP (x, 1));
1796       return;
1797     }
1798 
1799   /* If this is not a MEM, no need to do anything.  Similarly if the
1800      address is a constant or a register that is not a virtual register.  */
1801   if (!MEM_P (x))
1802     return;
1803 
1804   addr = XEXP (x, 0);
1805   if (CONSTANT_P (addr)
1806       || (REG_P (addr)
1807 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1808 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1809     return;
1810 
1811   instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1812 }
1813 
1814 /* Helper for instantiate_decls called via walk_tree: Process all decls
1815    in the given DECL_VALUE_EXPR.  */
1816 
1817 static tree
1818 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1819 {
1820   tree t = *tp;
1821   if (! EXPR_P (t))
1822     {
1823       *walk_subtrees = 0;
1824       if (DECL_P (t))
1825 	{
1826 	  if (DECL_RTL_SET_P (t))
1827 	    instantiate_decl_rtl (DECL_RTL (t));
1828 	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1829 	      && DECL_INCOMING_RTL (t))
1830 	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1831 	  if ((TREE_CODE (t) == VAR_DECL
1832 	       || TREE_CODE (t) == RESULT_DECL)
1833 	      && DECL_HAS_VALUE_EXPR_P (t))
1834 	    {
1835 	      tree v = DECL_VALUE_EXPR (t);
1836 	      walk_tree (&v, instantiate_expr, NULL, NULL);
1837 	    }
1838 	}
1839     }
1840   return NULL;
1841 }
1842 
1843 /* Subroutine of instantiate_decls: Process all decls in the given
1844    BLOCK node and all its subblocks.  */
1845 
1846 static void
1847 instantiate_decls_1 (tree let)
1848 {
1849   tree t;
1850 
1851   for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1852     {
1853       if (DECL_RTL_SET_P (t))
1854 	instantiate_decl_rtl (DECL_RTL (t));
1855       if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1856 	{
1857 	  tree v = DECL_VALUE_EXPR (t);
1858 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1859 	}
1860     }
1861 
1862   /* Process all subblocks.  */
1863   for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1864     instantiate_decls_1 (t);
1865 }
1866 
1867 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1868    all virtual registers in their DECL_RTL's.  */
1869 
1870 static void
1871 instantiate_decls (tree fndecl)
1872 {
1873   tree decl;
1874   unsigned ix;
1875 
1876   /* Process all parameters of the function.  */
1877   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1878     {
1879       instantiate_decl_rtl (DECL_RTL (decl));
1880       instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1881       if (DECL_HAS_VALUE_EXPR_P (decl))
1882 	{
1883 	  tree v = DECL_VALUE_EXPR (decl);
1884 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1885 	}
1886     }
1887 
1888   if ((decl = DECL_RESULT (fndecl))
1889       && TREE_CODE (decl) == RESULT_DECL)
1890     {
1891       if (DECL_RTL_SET_P (decl))
1892 	instantiate_decl_rtl (DECL_RTL (decl));
1893       if (DECL_HAS_VALUE_EXPR_P (decl))
1894 	{
1895 	  tree v = DECL_VALUE_EXPR (decl);
1896 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1897 	}
1898     }
1899 
1900   /* Process the saved static chain if it exists.  */
1901   decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1902   if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1903     instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1904 
1905   /* Now process all variables defined in the function or its subblocks.  */
1906   instantiate_decls_1 (DECL_INITIAL (fndecl));
1907 
1908   FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1909     if (DECL_RTL_SET_P (decl))
1910       instantiate_decl_rtl (DECL_RTL (decl));
1911   vec_free (cfun->local_decls);
1912 }
1913 
1914 /* Pass through the INSNS of function FNDECL and convert virtual register
1915    references to hard register references.  */
1916 
1917 static unsigned int
1918 instantiate_virtual_regs (void)
1919 {
1920   rtx_insn *insn;
1921 
1922   /* Compute the offsets to use for this function.  */
1923   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1924   var_offset = STARTING_FRAME_OFFSET;
1925   dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1926   out_arg_offset = STACK_POINTER_OFFSET;
1927 #ifdef FRAME_POINTER_CFA_OFFSET
1928   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1929 #else
1930   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1931 #endif
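  /* For reference (descriptive comment, not from the original source):
     these offsets pair up with the virtual registers roughly as

       virtual_incoming_args_rtx  -> arg pointer   + in_arg_offset
       virtual_stack_vars_rtx     -> frame pointer + var_offset
       virtual_stack_dynamic_rtx  -> stack pointer + dynamic_offset
       virtual_outgoing_args_rtx  -> stack pointer + out_arg_offset
       virtual_cfa_rtx            -> CFA base      + cfa_offset

     as implemented by instantiate_new_reg earlier in this file.  */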
1932 
1933   /* Initialize recognition, indicating that volatile is OK.  */
1934   init_recog ();
1935 
1936   /* Scan through all the insns, instantiating every virtual register still
1937      present.  */
1938   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1939     if (INSN_P (insn))
1940       {
1941 	/* These patterns in the instruction stream can never be recognized.
1942 	   Fortunately, they shouldn't contain virtual registers either.  */
1943         if (GET_CODE (PATTERN (insn)) == USE
1944 	    || GET_CODE (PATTERN (insn)) == CLOBBER
1945 	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1946 	  continue;
1947 	else if (DEBUG_INSN_P (insn))
1948 	  instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
1949 	else
1950 	  instantiate_virtual_regs_in_insn (insn);
1951 
1952 	if (insn->deleted ())
1953 	  continue;
1954 
1955 	instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1956 
1957 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
1958 	if (CALL_P (insn))
1959 	  instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1960       }
1961 
1962   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
1963   instantiate_decls (current_function_decl);
1964 
1965   targetm.instantiate_decls ();
1966 
1967   /* Indicate that, from now on, assign_stack_local should use
1968      frame_pointer_rtx.  */
1969   virtuals_instantiated = 1;
1970 
1971   return 0;
1972 }
1973 
1974 namespace {
1975 
1976 const pass_data pass_data_instantiate_virtual_regs =
1977 {
1978   RTL_PASS, /* type */
1979   "vregs", /* name */
1980   OPTGROUP_NONE, /* optinfo_flags */
1981   TV_NONE, /* tv_id */
1982   0, /* properties_required */
1983   0, /* properties_provided */
1984   0, /* properties_destroyed */
1985   0, /* todo_flags_start */
1986   0, /* todo_flags_finish */
1987 };
1988 
1989 class pass_instantiate_virtual_regs : public rtl_opt_pass
1990 {
1991 public:
1992   pass_instantiate_virtual_regs (gcc::context *ctxt)
1993     : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
1994   {}
1995 
1996   /* opt_pass methods: */
1997   virtual unsigned int execute (function *)
1998     {
1999       return instantiate_virtual_regs ();
2000     }
2001 
2002 }; // class pass_instantiate_virtual_regs
2003 
2004 } // anon namespace
2005 
2006 rtl_opt_pass *
2007 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2008 {
2009   return new pass_instantiate_virtual_regs (ctxt);
2010 }
2011 
2012 
2013 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2014    This means a type for which function calls must pass an address to the
2015    function or get an address back from the function.
2016    EXP may be a type node or an expression (whose type is tested).  */
2017 
2018 int
2019 aggregate_value_p (const_tree exp, const_tree fntype)
2020 {
2021   const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2022   int i, regno, nregs;
2023   rtx reg;
2024 
2025   if (fntype)
2026     switch (TREE_CODE (fntype))
2027       {
2028       case CALL_EXPR:
2029 	{
2030 	  tree fndecl = get_callee_fndecl (fntype);
2031 	  if (fndecl)
2032 	    fntype = TREE_TYPE (fndecl);
2033 	  else if (CALL_EXPR_FN (fntype))
2034 	    fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2035 	  else
2036 	    /* For internal functions, assume nothing needs to be
2037 	       returned in memory.  */
2038 	    return 0;
2039 	}
2040 	break;
2041       case FUNCTION_DECL:
2042 	fntype = TREE_TYPE (fntype);
2043 	break;
2044       case FUNCTION_TYPE:
2045       case METHOD_TYPE:
2046         break;
2047       case IDENTIFIER_NODE:
2048 	fntype = NULL_TREE;
2049 	break;
2050       default:
2051 	/* We don't expect other tree types here.  */
2052 	gcc_unreachable ();
2053       }
2054 
2055   if (VOID_TYPE_P (type))
2056     return 0;
2057 
2058   /* If a record should be passed the same as its first (and only) member,
2059      don't pass it as an aggregate.  */
2060   if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2061     return aggregate_value_p (first_field (type), fntype);
2062 
2063   /* If the front end has decided that this needs to be passed by
2064      reference, do so.  */
2065   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2066       && DECL_BY_REFERENCE (exp))
2067     return 1;
2068 
2069   /* Function types that are TREE_ADDRESSABLE force return in memory.  */
2070   if (fntype && TREE_ADDRESSABLE (fntype))
2071     return 1;
2072 
2073   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2074      and thus can't be returned in registers.  */
2075   if (TREE_ADDRESSABLE (type))
2076     return 1;
2077 
2078   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2079     return 1;
2080 
2081   if (targetm.calls.return_in_memory (type, fntype))
2082     return 1;
2083 
2084   /* Make sure we have suitable call-clobbered regs to return
2085      the value in; if not, we must return it in memory.  */
2086   reg = hard_function_value (type, 0, fntype, 0);
2087 
2088   /* If we have something other than a REG (e.g. a PARALLEL), then assume
2089      it is OK.  */
2090   if (!REG_P (reg))
2091     return 0;
2092 
2093   regno = REGNO (reg);
2094   nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2095   for (i = 0; i < nregs; i++)
2096     if (! call_used_regs[regno + i])
2097       return 1;
2098 
2099   return 0;
2100 }
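/* Usage sketch (illustrative, not from the original source): the common
   caller pattern is

     if (aggregate_value_p (DECL_RESULT (fndecl), fndecl))
       ... return the value through a hidden pointer argument ...

   as done in assign_parms_augmented_arg_list below.  As one example, a C++
   class type with a nontrivial copy constructor is TREE_ADDRESSABLE and so
   always answers 1 here, regardless of the target's return_in_memory hook
   (that example is an assumption about the front end, not stated in this
   file).  */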
2101 
2102 /* Return true if we should assign DECL a pseudo register; false if it
2103    should live on the local stack.  */
2104 
2105 bool
2106 use_register_for_decl (const_tree decl)
2107 {
2108   if (TREE_CODE (decl) == SSA_NAME)
2109     {
2110       /* We often try to use the SSA_NAME, instead of its underlying
2111 	 decl, to get type information and guide decisions, to avoid
2112 	 differences of behavior between anonymous and named
2113 	 variables, but in this one case we have to go for the actual
2114 	 variable if there is one.  The main reason is that, at least
2115 	 at -O0, we want to place user variables on the stack, but we
2116 	 don't mind using pseudos for anonymous or ignored temps.
2117 	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2118 	 should go in pseudos, whereas their corresponding variables
2119 	 might have to go on the stack.  So, disregarding the decl
2120 	 here would negatively impact debug info at -O0, enable
2121 	 coalescing between SSA_NAMEs that ought to get different
2122 	 stack/pseudo assignments, and get the incoming argument
2123 	 processing thoroughly confused by PARM_DECLs expected to live
2124 	 in stack slots but assigned to pseudos.  */
2125       if (!SSA_NAME_VAR (decl))
2126 	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2127 	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2128 
2129       decl = SSA_NAME_VAR (decl);
2130     }
2131 
2132   /* Honor volatile.  */
2133   if (TREE_SIDE_EFFECTS (decl))
2134     return false;
2135 
2136   /* Honor addressability.  */
2137   if (TREE_ADDRESSABLE (decl))
2138     return false;
2139 
2140   /* RESULT_DECLs are a bit special in that they're assigned without
2141      regard to use_register_for_decl, but we generally only store in
2142      them.  If we coalesce their SSA NAMEs, we'd better return a
2143      result that matches the assignment in expand_function_start.  */
2144   if (TREE_CODE (decl) == RESULT_DECL)
2145     {
2146       /* If it's not an aggregate, we're going to use a REG or a
2147 	 PARALLEL containing a REG.  */
2148       if (!aggregate_value_p (decl, current_function_decl))
2149 	return true;
2150 
2151       /* If expand_function_start determines the return value, we'll
2152 	 use MEM if it's not by reference.  */
2153       if (cfun->returns_pcc_struct
2154 	  || (targetm.calls.struct_value_rtx
2155 	      (TREE_TYPE (current_function_decl), 1)))
2156 	return DECL_BY_REFERENCE (decl);
2157 
2158       /* Otherwise, we're taking an extra all.function_result_decl
2159 	 argument.  It's set up in assign_parms_augmented_arg_list,
2160 	 under the (negated) conditions above, and then it's used to
2161 	 set up the RESULT_DECL rtl in assign_parms, after looping
2162 	 over all parameters.  Now, if the RESULT_DECL is not by
2163 	 reference, we'll use a MEM either way.  */
2164       if (!DECL_BY_REFERENCE (decl))
2165 	return false;
2166 
2167       /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2168 	 the function_result_decl's assignment.  Since it's a pointer,
2169 	 we can short-circuit a number of the tests below, and we must
2170 	 duplicate them because we don't have the
2171 	 function_result_decl to test.  */
2172       if (!targetm.calls.allocate_stack_slots_for_args ())
2173 	return true;
2174       /* We don't set DECL_IGNORED_P for the function_result_decl.  */
2175       if (optimize)
2176 	return true;
2177       /* We don't set DECL_REGISTER for the function_result_decl.  */
2178       return false;
2179     }
2180 
2181   /* Decl is implicitly addressable by bound stores and loads
2182      if it is an aggregate holding bounds.  */
2183   if (chkp_function_instrumented_p (current_function_decl)
2184       && TREE_TYPE (decl)
2185       && !BOUNDED_P (decl)
2186       && chkp_type_has_pointer (TREE_TYPE (decl)))
2187     return false;
2188 
2189   /* Only register-like things go in registers.  */
2190   if (DECL_MODE (decl) == BLKmode)
2191     return false;
2192 
2193   /* If -ffloat-store specified, don't put explicit float variables
2194      into registers.  */
2195   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2196      propagates values across these stores, and it probably shouldn't.  */
2197   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2198     return false;
2199 
2200   if (!targetm.calls.allocate_stack_slots_for_args ())
2201     return true;
2202 
2203   /* If we're not interested in tracking debugging information for
2204      this decl, then we can certainly put it in a register.  */
2205   if (DECL_IGNORED_P (decl))
2206     return true;
2207 
2208   if (optimize)
2209     return true;
2210 
2211   if (!DECL_REGISTER (decl))
2212     return false;
2213 
2214   switch (TREE_CODE (TREE_TYPE (decl)))
2215     {
2216     case RECORD_TYPE:
2217     case UNION_TYPE:
2218     case QUAL_UNION_TYPE:
2219       /* When not optimizing, disregard register keyword for variables with
2220 	 types containing methods, otherwise the methods won't be callable
2221 	 from the debugger.  */
2222       if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
2223 	return false;
2224       break;
2225     default:
2226       break;
2227     }
2228 
2229   return true;
2230 }
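/* Illustrative note (not from the original source): at -O0 a named user
   variable normally reaches the end of the function above with
   DECL_REGISTER unset and so lands on the stack, while a compiler-generated
   temporary is DECL_IGNORED_P and gets a pseudo; with optimization enabled
   both take the early "return true" path.  */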
2231 
2232 /* Structures to communicate between the subroutines of assign_parms.
2233    The first holds data persistent across all parameters, the second
2234    is cleared out for each parameter.  */
2235 
2236 struct assign_parm_data_all
2237 {
2238   /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2239      should become a job of the target or otherwise encapsulated.  */
2240   CUMULATIVE_ARGS args_so_far_v;
2241   cumulative_args_t args_so_far;
2242   struct args_size stack_args_size;
2243   tree function_result_decl;
2244   tree orig_fnargs;
2245   rtx_insn *first_conversion_insn;
2246   rtx_insn *last_conversion_insn;
2247   HOST_WIDE_INT pretend_args_size;
2248   HOST_WIDE_INT extra_pretend_bytes;
2249   int reg_parm_stack_space;
2250 };
2251 
2252 struct assign_parm_data_one
2253 {
2254   tree nominal_type;
2255   tree passed_type;
2256   rtx entry_parm;
2257   rtx stack_parm;
2258   machine_mode nominal_mode;
2259   machine_mode passed_mode;
2260   machine_mode promoted_mode;
2261   struct locate_and_pad_arg_data locate;
2262   int partial;
2263   BOOL_BITFIELD named_arg : 1;
2264   BOOL_BITFIELD passed_pointer : 1;
2265   BOOL_BITFIELD on_stack : 1;
2266   BOOL_BITFIELD loaded_in_reg : 1;
2267 };
2268 
2269 struct bounds_parm_data
2270 {
2271   assign_parm_data_one parm_data;
2272   tree bounds_parm;
2273   tree ptr_parm;
2274   rtx ptr_entry;
2275   int bound_no;
2276 };
2277 
2278 /* A subroutine of assign_parms.  Initialize ALL.  */
2279 
2280 static void
2281 assign_parms_initialize_all (struct assign_parm_data_all *all)
2282 {
2283   tree fntype ATTRIBUTE_UNUSED;
2284 
2285   memset (all, 0, sizeof (*all));
2286 
2287   fntype = TREE_TYPE (current_function_decl);
2288 
2289 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2290   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2291 #else
2292   INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2293 			current_function_decl, -1);
2294 #endif
2295   all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2296 
2297 #ifdef INCOMING_REG_PARM_STACK_SPACE
2298   all->reg_parm_stack_space
2299     = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2300 #endif
2301 }
2302 
2303 /* If ARGS contains entries with complex types, split each such entry into
2304    two entries of the component type, updating ARGS in place (the vector
2305    grows by one element per split parameter).  */
2306 
2307 static void
2308 split_complex_args (vec<tree> *args)
2309 {
2310   unsigned i;
2311   tree p;
2312 
2313   FOR_EACH_VEC_ELT (*args, i, p)
2314     {
2315       tree type = TREE_TYPE (p);
2316       if (TREE_CODE (type) == COMPLEX_TYPE
2317 	  && targetm.calls.split_complex_arg (type))
2318 	{
2319 	  tree decl;
2320 	  tree subtype = TREE_TYPE (type);
2321 	  bool addressable = TREE_ADDRESSABLE (p);
2322 
2323 	  /* Rewrite the PARM_DECL's type with its component.  */
2324 	  p = copy_node (p);
2325 	  TREE_TYPE (p) = subtype;
2326 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2327 	  DECL_MODE (p) = VOIDmode;
2328 	  DECL_SIZE (p) = NULL;
2329 	  DECL_SIZE_UNIT (p) = NULL;
2330 	  /* If this arg must go in memory, put it in a pseudo here.
2331 	     We can't allow it to go in memory as per normal parms,
2332 	     because the usual place might not have the imag part
2333 	     adjacent to the real part.  */
2334 	  DECL_ARTIFICIAL (p) = addressable;
2335 	  DECL_IGNORED_P (p) = addressable;
2336 	  TREE_ADDRESSABLE (p) = 0;
2337 	  layout_decl (p, 0);
2338 	  (*args)[i] = p;
2339 
2340 	  /* Build a second synthetic decl.  */
2341 	  decl = build_decl (EXPR_LOCATION (p),
2342 			     PARM_DECL, NULL_TREE, subtype);
2343 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2344 	  DECL_ARTIFICIAL (decl) = addressable;
2345 	  DECL_IGNORED_P (decl) = addressable;
2346 	  layout_decl (decl, 0);
2347 	  args->safe_insert (++i, decl);
2348 	}
2349     }
2350 }
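/* Illustrative sketch (hypothetical example, not from the original source):
   given a parameter declared as

     _Complex double z;

   on a target whose split_complex_arg hook accepts the type, the original
   PARM_DECL is rewritten to type double for the real part and a second,
   synthetic, nameless PARM_DECL of type double is inserted right after it
   for the imaginary part.  */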
2351 
2352 /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
2353    the hidden struct return argument, and (abi willing) complex args.
2354    Return the new parameter list.  */
2355 
2356 static vec<tree>
2357 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2358 {
2359   tree fndecl = current_function_decl;
2360   tree fntype = TREE_TYPE (fndecl);
2361   vec<tree> fnargs = vNULL;
2362   tree arg;
2363 
2364   for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2365     fnargs.safe_push (arg);
2366 
2367   all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2368 
2369   /* If struct value address is treated as the first argument, make it so.  */
2370   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2371       && ! cfun->returns_pcc_struct
2372       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2373     {
2374       tree type = build_pointer_type (TREE_TYPE (fntype));
2375       tree decl;
2376 
2377       decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2378 			 PARM_DECL, get_identifier (".result_ptr"), type);
2379       DECL_ARG_TYPE (decl) = type;
2380       DECL_ARTIFICIAL (decl) = 1;
2381       DECL_NAMELESS (decl) = 1;
2382       TREE_CONSTANT (decl) = 1;
2383       /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
2384 	 changes, the end of the RESULT_DECL handling block in
2385 	 use_register_for_decl must be adjusted to match.  */
2386 
2387       DECL_CHAIN (decl) = all->orig_fnargs;
2388       all->orig_fnargs = decl;
2389       fnargs.safe_insert (0, decl);
2390 
2391       all->function_result_decl = decl;
2392 
2393       /* If function is instrumented then bounds of the
2394 	 passed structure address is the second argument.  */
2395       if (chkp_function_instrumented_p (fndecl))
2396 	{
2397 	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2398 			     PARM_DECL, get_identifier (".result_bnd"),
2399 			     pointer_bounds_type_node);
2400 	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2401 	  DECL_ARTIFICIAL (decl) = 1;
2402 	  DECL_NAMELESS (decl) = 1;
2403 	  TREE_CONSTANT (decl) = 1;
2404 
2405 	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2406 	  DECL_CHAIN (all->orig_fnargs) = decl;
2407 	  fnargs.safe_insert (1, decl);
2408 	}
2409     }
2410 
2411   /* If the target wants to split complex arguments into scalars, do so.  */
2412   if (targetm.calls.split_complex_arg)
2413     split_complex_args (&fnargs);
2414 
2415   return fnargs;
2416 }
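/* Illustrative sketch (hypothetical example, not from the original source):
   for a function such as

     struct big f (int x);

   on a target that returns `struct big' in memory and has no struct-value
   register, the list built above behaves as if the source had been

     void f (struct big *.result_ptr, int x);

   with the synthetic `.result_ptr' recorded in all->function_result_decl;
   under chkp instrumentation a `.result_bnd' bounds parameter follows it.  */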
2417 
2418 /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2419    data for the parameter.  Incorporate ABI specifics such as pass-by-
2420    reference and type promotion.  */
2421 
2422 static void
2423 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2424 			     struct assign_parm_data_one *data)
2425 {
2426   tree nominal_type, passed_type;
2427   machine_mode nominal_mode, passed_mode, promoted_mode;
2428   int unsignedp;
2429 
2430   memset (data, 0, sizeof (*data));
2431 
2432   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
2433   if (!cfun->stdarg)
2434     data->named_arg = 1;  /* No variadic parms.  */
2435   else if (DECL_CHAIN (parm))
2436     data->named_arg = 1;  /* Not the last non-variadic parm. */
2437   else if (targetm.calls.strict_argument_naming (all->args_so_far))
2438     data->named_arg = 1;  /* Only variadic ones are unnamed.  */
2439   else
2440     data->named_arg = 0;  /* Treat as variadic.  */
2441 
2442   nominal_type = TREE_TYPE (parm);
2443   passed_type = DECL_ARG_TYPE (parm);
2444 
2445   /* Look out for errors propagating this far.  Also, if the parameter's
2446      type is void then its value doesn't matter.  */
2447   if (TREE_TYPE (parm) == error_mark_node
2448       /* This can happen after weird syntax errors
2449 	 or if an enum type is defined among the parms.  */
2450       || TREE_CODE (parm) != PARM_DECL
2451       || passed_type == NULL
2452       || VOID_TYPE_P (nominal_type))
2453     {
2454       nominal_type = passed_type = void_type_node;
2455       nominal_mode = passed_mode = promoted_mode = VOIDmode;
2456       goto egress;
2457     }
2458 
2459   /* Find mode of arg as it is passed, and mode of arg as it should be
2460      during execution of this function.  */
2461   passed_mode = TYPE_MODE (passed_type);
2462   nominal_mode = TYPE_MODE (nominal_type);
2463 
2464   /* If the parm is to be passed as a transparent union or record, use the
2465      type of the first field for the tests below.  We have already verified
2466      that the modes are the same.  */
2467   if ((TREE_CODE (passed_type) == UNION_TYPE
2468        || TREE_CODE (passed_type) == RECORD_TYPE)
2469       && TYPE_TRANSPARENT_AGGR (passed_type))
2470     passed_type = TREE_TYPE (first_field (passed_type));
2471 
2472   /* See if this arg was passed by invisible reference.  */
2473   if (pass_by_reference (&all->args_so_far_v, passed_mode,
2474 			 passed_type, data->named_arg))
2475     {
2476       passed_type = nominal_type = build_pointer_type (passed_type);
2477       data->passed_pointer = true;
2478       passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2479     }
2480 
2481   /* Find mode as it is passed by the ABI.  */
2482   unsignedp = TYPE_UNSIGNED (passed_type);
2483   promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2484 				         TREE_TYPE (current_function_decl), 0);
2485 
2486  egress:
2487   data->nominal_type = nominal_type;
2488   data->passed_type = passed_type;
2489   data->nominal_mode = nominal_mode;
2490   data->passed_mode = passed_mode;
2491   data->promoted_mode = promoted_mode;
2492 }
2493 
2494 /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2495 
2496 static void
2497 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2498 			    struct assign_parm_data_one *data, bool no_rtl)
2499 {
2500   int varargs_pretend_bytes = 0;
2501 
2502   targetm.calls.setup_incoming_varargs (all->args_so_far,
2503 					data->promoted_mode,
2504 					data->passed_type,
2505 					&varargs_pretend_bytes, no_rtl);
2506 
2507   /* If the back-end has requested extra stack space, record how much is
2508      needed.  Do not change pretend_args_size otherwise since it may be
2509      nonzero from an earlier partial argument.  */
2510   if (varargs_pretend_bytes > 0)
2511     all->pretend_args_size = varargs_pretend_bytes;
2512 }
2513 
2514 /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2515    the incoming location of the current parameter.  */
2516 
2517 static void
2518 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2519 			    struct assign_parm_data_one *data)
2520 {
2521   HOST_WIDE_INT pretend_bytes = 0;
2522   rtx entry_parm;
2523   bool in_regs;
2524 
2525   if (data->promoted_mode == VOIDmode)
2526     {
2527       data->entry_parm = data->stack_parm = const0_rtx;
2528       return;
2529     }
2530 
2531   entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2532 						    data->promoted_mode,
2533 						    data->passed_type,
2534 						    data->named_arg);
2535 
2536   if (entry_parm == 0)
2537     data->promoted_mode = data->passed_mode;
2538 
2539   /* Determine parm's home in the stack, in case it arrives in the stack
2540      or we should pretend it did.  Compute the stack position and rtx where
2541      the argument arrives and its size.
2542 
2543      There is one complexity here:  If this was a parameter that would
2544      have been passed in registers, but wasn't only because it is
2545      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2546      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2547      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2548      as it was the previous time.  */
2549   in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2550 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2551   in_regs = true;
2552 #endif
2553   if (!in_regs && !data->named_arg)
2554     {
2555       if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2556 	{
2557 	  rtx tem;
2558 	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
2559 						     data->promoted_mode,
2560 						     data->passed_type, true);
2561 	  in_regs = tem != NULL;
2562 	}
2563     }
2564 
2565   /* If this parameter was passed both in registers and in the stack, use
2566      the copy on the stack.  */
2567   if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2568 					data->passed_type))
2569     entry_parm = 0;
2570 
2571   if (entry_parm)
2572     {
2573       int partial;
2574 
2575       partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2576 						 data->promoted_mode,
2577 						 data->passed_type,
2578 						 data->named_arg);
2579       data->partial = partial;
2580 
2581       /* The caller might already have allocated stack space for the
2582 	 register parameters.  */
2583       if (partial != 0 && all->reg_parm_stack_space == 0)
2584 	{
2585 	  /* Part of this argument is passed in registers and part
2586 	     is passed on the stack.  Ask the prologue code to extend
2587 	     the stack part so that we can recreate the full value.
2588 
2589 	     PRETEND_BYTES is the size of the registers we need to store.
2590 	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2591 	     stack space that the prologue should allocate.
2592 
2593 	     Internally, gcc assumes that the argument pointer is aligned
2594 	     to STACK_BOUNDARY bits.  This is used both for alignment
2595 	     optimizations (see init_emit) and to locate arguments that are
2596 	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
2597 	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2598 	     a stack boundary.  */
2599 
2600 	  /* We assume at most one partial arg, and it must be the first
2601 	     argument on the stack.  */
2602 	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2603 
2604 	  pretend_bytes = partial;
2605 	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2606 
2607 	  /* We want to align relative to the actual stack pointer, so
2608 	     don't include this in the stack size until later.  */
2609 	  all->extra_pretend_bytes = all->pretend_args_size;
2610 	}
2611     }
2612 
2613   locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2614 		       all->reg_parm_stack_space,
2615 		       entry_parm ? data->partial : 0, current_function_decl,
2616 		       &all->stack_args_size, &data->locate);
2617 
2618   /* Update parm_stack_boundary if this parameter is passed in the
2619      stack.  */
2620   if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2621     crtl->parm_stack_boundary = data->locate.boundary;
2622 
2623   /* Adjust offsets to include the pretend args.  */
2624   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2625   data->locate.slot_offset.constant += pretend_bytes;
2626   data->locate.offset.constant += pretend_bytes;
2627 
2628   data->entry_parm = entry_parm;
2629 }
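/* Worked example (hypothetical numbers, not from the original source): if
   4 bytes of an argument arrive in registers (partial == 4), there is no
   reg_parm_stack_space, and STACK_BYTES == 8, then

     all->pretend_args_size = CEIL_ROUND (4, 8) = 8

   and, once the final adjustment subtracts the 4 bytes already accounted
   for, the slot and offset locations move by 8 - 4 = 4 bytes.  */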
2630 
2631 /* A subroutine of assign_parms.  If there is actually space on the stack
2632    for this parm, count it in stack_args_size and return true.  */
2633 
2634 static bool
2635 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2636 			   struct assign_parm_data_one *data)
2637 {
2638   /* Bounds are never passed on the stack to keep compatibility
2639      with uninstrumented code.  */
2640   if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2641     return false;
2642   /* Trivially true if we've no incoming register.  */
2643   else if (data->entry_parm == NULL)
2644     ;
2645   /* Also true if we're partially in registers and partially not,
2646      since we've arranged to drop the entire argument on the stack.  */
2647   else if (data->partial != 0)
2648     ;
2649   /* Also true if the target says that it's passed in both registers
2650      and on the stack.  */
2651   else if (GET_CODE (data->entry_parm) == PARALLEL
2652 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2653     ;
2654   /* Also true if the target says that there's stack allocated for
2655      all register parameters.  */
2656   else if (all->reg_parm_stack_space > 0)
2657     ;
2658   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2659   else
2660     return false;
2661 
2662   all->stack_args_size.constant += data->locate.size.constant;
2663   if (data->locate.size.var)
2664     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2665 
2666   return true;
2667 }
2668 
2669 /* A subroutine of assign_parms.  Given that this parameter is allocated
2670    stack space by the ABI, find it.  */
2671 
2672 static void
2673 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2674 {
2675   rtx offset_rtx, stack_parm;
2676   unsigned int align, boundary;
2677 
2678   /* If we're passing this arg using a reg, make its stack home the
2679      aligned stack slot.  */
2680   if (data->entry_parm)
2681     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2682   else
2683     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2684 
2685   stack_parm = crtl->args.internal_arg_pointer;
2686   if (offset_rtx != const0_rtx)
2687     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2688   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2689 
2690   if (!data->passed_pointer)
2691     {
2692       set_mem_attributes (stack_parm, parm, 1);
2693       /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2694 	 while promoted mode's size is needed.  */
2695       if (data->promoted_mode != BLKmode
2696 	  && data->promoted_mode != DECL_MODE (parm))
2697 	{
2698 	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2699 	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2700 	    {
2701 	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
2702 						  data->promoted_mode);
2703 	      if (offset)
2704 		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2705 	    }
2706 	}
2707     }
2708 
2709   boundary = data->locate.boundary;
2710   align = BITS_PER_UNIT;
2711 
2712   /* If we're padding upward, we know that the alignment of the slot
2713      is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2714      intentionally forcing upward padding.  Otherwise we have to come
2715      up with a guess at the alignment based on OFFSET_RTX.  */
2716   if (data->locate.where_pad != downward || data->entry_parm)
2717     align = boundary;
2718   else if (CONST_INT_P (offset_rtx))
2719     {
2720       align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2721       align = align & -align;
2722     }
2723   set_mem_align (stack_parm, align);
2724 
2725   if (data->entry_parm)
2726     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2727 
2728   data->stack_parm = stack_parm;
2729 }
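/* Worked example of the alignment guess above (hypothetical numbers, not
   from the original source): with a constant OFFSET_RTX of 12 bytes,
   8-bit units, and a boundary of 64 bits,

     align = (12 * 8) | 64 = 96 | 64 = 96,  and  96 & -96 = 32,

   so the stack slot is only assumed to be 32-bit aligned.  */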
2730 
2731 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
2732    always valid and contiguous.  */
2733 
2734 static void
2735 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2736 {
2737   rtx entry_parm = data->entry_parm;
2738   rtx stack_parm = data->stack_parm;
2739 
2740   /* If this parm was passed part in regs and part in memory, pretend it
2741      arrived entirely in memory by pushing the register-part onto the stack.
2742      In the special case of a DImode or DFmode that is split, we could put
2743      it together in a pseudoreg directly, but for now that's not worth
2744      bothering with.  */
2745   if (data->partial != 0)
2746     {
2747       /* Handle calls that pass values in multiple non-contiguous
2748 	 locations.  The Irix 6 ABI has examples of this.  */
2749       if (GET_CODE (entry_parm) == PARALLEL)
2750 	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2751 			  data->passed_type,
2752 			  int_size_in_bytes (data->passed_type));
2753       else
2754 	{
2755 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
2756 	  move_block_from_reg (REGNO (entry_parm),
2757 			       validize_mem (copy_rtx (stack_parm)),
2758 			       data->partial / UNITS_PER_WORD);
2759 	}
2760 
2761       entry_parm = stack_parm;
2762     }
2763 
2764   /* If we didn't decide this parm came in a register, by default it came
2765      on the stack.  */
2766   else if (entry_parm == NULL)
2767     entry_parm = stack_parm;
2768 
2769   /* When an argument is passed in multiple locations, we can't make use
2770      of this information, but we can save some copying if the whole argument
2771      is passed in a single register.  */
2772   else if (GET_CODE (entry_parm) == PARALLEL
2773 	   && data->nominal_mode != BLKmode
2774 	   && data->passed_mode != BLKmode)
2775     {
2776       size_t i, len = XVECLEN (entry_parm, 0);
2777 
2778       for (i = 0; i < len; i++)
2779 	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2780 	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2781 	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2782 		== data->passed_mode)
2783 	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2784 	  {
2785 	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2786 	    break;
2787 	  }
2788     }
2789 
2790   data->entry_parm = entry_parm;
2791 }
2792 
2793 /* A subroutine of assign_parms.  Reconstitute any values which were
2794    passed in multiple registers and would fit in a single register.  */
2795 
2796 static void
2797 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2798 {
2799   rtx entry_parm = data->entry_parm;
2800 
2801   /* Convert the PARALLEL to a REG of the same mode as the parallel.
2802      This can be done with register operations rather than on the
2803      stack, even if we will store the reconstituted parameter on the
2804      stack later.  */
2805   if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2806     {
2807       rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2808       emit_group_store (parmreg, entry_parm, data->passed_type,
2809 			GET_MODE_SIZE (GET_MODE (entry_parm)));
2810       entry_parm = parmreg;
2811     }
2812 
2813   data->entry_parm = entry_parm;
2814 }
2815 
2816 /* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
2817    always valid and properly aligned.  */
2818 
2819 static void
2820 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2821 {
2822   rtx stack_parm = data->stack_parm;
2823 
2824   /* If we can't trust the parm stack slot to be aligned enough for its
2825      ultimate type, don't use that slot after entry.  We'll make another
2826      stack slot, if we need one.  */
2827   if (stack_parm
2828       && ((STRICT_ALIGNMENT
2829 	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2830 	  || (data->nominal_type
2831 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2832 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2833     stack_parm = NULL;
2834 
2835   /* If parm was passed in memory, and we need to convert it on entry,
2836      don't store it back in that same slot.  */
2837   else if (data->entry_parm == stack_parm
2838 	   && data->nominal_mode != BLKmode
2839 	   && data->nominal_mode != data->passed_mode)
2840     stack_parm = NULL;
2841 
2842   /* If stack protection is in effect for this function, don't leave any
2843      pointers in their passed stack slots.  */
2844   else if (crtl->stack_protect_guard
2845 	   && (flag_stack_protect == 2
2846 	       || data->passed_pointer
2847 	       || POINTER_TYPE_P (data->nominal_type)))
2848     stack_parm = NULL;
2849 
2850   data->stack_parm = stack_parm;
2851 }
2852 
2853 /* A subroutine of assign_parms.  Return true if the current parameter
2854    should be stored as a BLKmode in the current frame.  */
2855 
2856 static bool
2857 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2858 {
2859   if (data->nominal_mode == BLKmode)
2860     return true;
2861   if (GET_MODE (data->entry_parm) == BLKmode)
2862     return true;
2863 
2864 #ifdef BLOCK_REG_PADDING
2865   /* Only assign_parm_setup_block knows how to deal with register arguments
2866      that are padded at the least significant end.  */
2867   if (REG_P (data->entry_parm)
2868       && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2869       && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2870 	  == (BYTES_BIG_ENDIAN ? upward : downward)))
2871     return true;
2872 #endif
2873 
2874   return false;
2875 }
2876 
2877 /* A subroutine of assign_parms.  Arrange for the parameter to be
2878    present and valid in DATA->STACK_RTL.  */
2879 
2880 static void
2881 assign_parm_setup_block (struct assign_parm_data_all *all,
2882 			 tree parm, struct assign_parm_data_one *data)
2883 {
2884   rtx entry_parm = data->entry_parm;
2885   rtx stack_parm = data->stack_parm;
2886   rtx target_reg = NULL_RTX;
2887   bool in_conversion_seq = false;
2888   HOST_WIDE_INT size;
2889   HOST_WIDE_INT size_stored;
2890 
2891   if (GET_CODE (entry_parm) == PARALLEL)
2892     entry_parm = emit_group_move_into_temps (entry_parm);
2893 
2894   /* If we want the parameter in a pseudo, don't use a stack slot.  */
2895   if (is_gimple_reg (parm) && use_register_for_decl (parm))
2896     {
2897       tree def = ssa_default_def (cfun, parm);
2898       gcc_assert (def);
2899       machine_mode mode = promote_ssa_mode (def, NULL);
2900       rtx reg = gen_reg_rtx (mode);
2901       if (GET_CODE (reg) != CONCAT)
2902 	stack_parm = reg;
2903       else
2904 	{
2905 	  target_reg = reg;
2906 	  /* Avoid allocating a stack slot, if there isn't one
2907 	     preallocated by the ABI.  It might seem like we should
2908 	     always prefer a pseudo, but converting between
2909 	     floating-point and integer modes goes through the stack
2910 	     on various machines, so it's better to use the reserved
2911 	     stack slot than to risk wasting it and allocating more
2912 	     for the conversion.  */
2913 	  if (stack_parm == NULL_RTX)
2914 	    {
2915 	      int save = generating_concat_p;
2916 	      generating_concat_p = 0;
2917 	      stack_parm = gen_reg_rtx (mode);
2918 	      generating_concat_p = save;
2919 	    }
2920 	}
2921       data->stack_parm = NULL;
2922     }
2923 
2924   size = int_size_in_bytes (data->passed_type);
2925   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2926   if (stack_parm == 0)
2927     {
2928       DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2929       stack_parm = assign_stack_local (BLKmode, size_stored,
2930 				       DECL_ALIGN (parm));
2931       if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2932 	PUT_MODE (stack_parm, GET_MODE (entry_parm));
2933       set_mem_attributes (stack_parm, parm, 1);
2934     }
2935 
2936   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
2937      calls that pass values in multiple non-contiguous locations.  */
2938   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2939     {
2940       rtx mem;
2941 
2942       /* Note that we will be storing an integral number of words.
2943 	 So we have to be careful to ensure that we allocate an
2944 	 integral number of words.  We do this above when we call
2945 	 assign_stack_local if space was not allocated in the argument
2946 	 list.  If it was, this will not work if PARM_BOUNDARY is not
2947 	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
2948 	 if it becomes a problem.  Exception is when BLKmode arrives
2949 	 with arguments not conforming to word_mode.  */
2950 
2951       if (data->stack_parm == 0)
2952 	;
2953       else if (GET_CODE (entry_parm) == PARALLEL)
2954 	;
2955       else
2956 	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2957 
2958       mem = validize_mem (copy_rtx (stack_parm));
2959 
2960       /* Handle values in multiple non-contiguous locations.  */
2961       if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2962 	emit_group_store (mem, entry_parm, data->passed_type, size);
2963       else if (GET_CODE (entry_parm) == PARALLEL)
2964 	{
2965 	  push_to_sequence2 (all->first_conversion_insn,
2966 			     all->last_conversion_insn);
2967 	  emit_group_store (mem, entry_parm, data->passed_type, size);
2968 	  all->first_conversion_insn = get_insns ();
2969 	  all->last_conversion_insn = get_last_insn ();
2970 	  end_sequence ();
2971 	  in_conversion_seq = true;
2972 	}
2973 
2974       else if (size == 0)
2975 	;
2976 
2977       /* If SIZE is that of a mode no bigger than a word, just use
2978 	 that mode's store operation.  */
2979       else if (size <= UNITS_PER_WORD)
2980 	{
2981 	  machine_mode mode
2982 	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2983 
2984 	  if (mode != BLKmode
2985 #ifdef BLOCK_REG_PADDING
2986 	      && (size == UNITS_PER_WORD
2987 		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2988 		      != (BYTES_BIG_ENDIAN ? upward : downward)))
2989 #endif
2990 	      )
2991 	    {
2992 	      rtx reg;
2993 
2994 	      /* We are really truncating a word_mode value containing
2995 		 SIZE bytes into a value of mode MODE.  If such an
2996 		 operation requires no actual instructions, we can refer
2997 		 to the value directly in mode MODE, otherwise we must
2998 		 start with the register in word_mode and explicitly
2999 		 convert it.  */
3000 	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
3001 		reg = gen_rtx_REG (mode, REGNO (entry_parm));
3002 	      else
3003 		{
3004 		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3005 		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3006 		}
3007 	      emit_move_insn (change_address (mem, mode, 0), reg);
3008 	    }
3009 
3010 #ifdef BLOCK_REG_PADDING
3011 	  /* Storing the register in memory as a full word, as
3012 	     move_block_from_reg below would do, and then using the
3013 	     MEM in a smaller mode, has the effect of shifting right
3014 	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
3015 	     shifting must be explicit.  */
3016 	  else if (!MEM_P (mem))
3017 	    {
3018 	      rtx x;
3019 
3020 	      /* If the assert below fails, we should have taken the
3021 		 mode != BLKmode path above, unless we have downward
3022 		 padding of smaller-than-word arguments on a machine
3023 		 with little-endian bytes, which would likely require
3024 		 additional changes to work correctly.  */
3025 	      gcc_checking_assert (BYTES_BIG_ENDIAN
3026 				   && (BLOCK_REG_PADDING (mode,
3027 							  data->passed_type, 1)
3028 				       == upward));
3029 
3030 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3031 
3032 	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3033 	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3034 				NULL_RTX, 1);
3035 	      x = force_reg (word_mode, x);
3036 	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3037 
3038 	      emit_move_insn (mem, x);
3039 	    }
3040 #endif
3041 
3042 	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3043 	     machine must be aligned to the left before storing
3044 	     to memory.  Note that the previous test doesn't
3045 	     handle all cases (e.g. SIZE == 3).  */
3046 	  else if (size != UNITS_PER_WORD
3047 #ifdef BLOCK_REG_PADDING
3048 		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3049 		       == downward)
3050 #else
3051 		   && BYTES_BIG_ENDIAN
3052 #endif
3053 		   )
3054 	    {
3055 	      rtx tem, x;
3056 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3057 	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3058 
3059 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3060 	      tem = change_address (mem, word_mode, 0);
3061 	      emit_move_insn (tem, x);
3062 	    }
3063 	  else
3064 	    move_block_from_reg (REGNO (entry_parm), mem,
3065 				 size_stored / UNITS_PER_WORD);
3066 	}
3067       else if (!MEM_P (mem))
3068 	{
3069 	  gcc_checking_assert (size > UNITS_PER_WORD);
3070 #ifdef BLOCK_REG_PADDING
3071 	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3072 						  data->passed_type, 0)
3073 			       == upward);
3074 #endif
3075 	  emit_move_insn (mem, entry_parm);
3076 	}
3077       else
3078 	move_block_from_reg (REGNO (entry_parm), mem,
3079 			     size_stored / UNITS_PER_WORD);
3080     }
3081   else if (data->stack_parm == 0)
3082     {
3083       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3084       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3085 		       BLOCK_OP_NORMAL);
3086       all->first_conversion_insn = get_insns ();
3087       all->last_conversion_insn = get_last_insn ();
3088       end_sequence ();
3089       in_conversion_seq = true;
3090     }
3091 
3092   if (target_reg)
3093     {
3094       if (!in_conversion_seq)
3095 	emit_move_insn (target_reg, stack_parm);
3096       else
3097 	{
3098 	  push_to_sequence2 (all->first_conversion_insn,
3099 			     all->last_conversion_insn);
3100 	  emit_move_insn (target_reg, stack_parm);
3101 	  all->first_conversion_insn = get_insns ();
3102 	  all->last_conversion_insn = get_last_insn ();
3103 	  end_sequence ();
3104 	}
3105       stack_parm = target_reg;
3106     }
3107 
3108   data->stack_parm = stack_parm;
3109   set_parm_rtl (parm, stack_parm);
3110 }
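/* Worked example for the BLOCK_REG_PADDING shifts in the function above
   (hypothetical numbers, not from the original source): a 3-byte BLKmode
   argument arriving in a register on a big-endian machine with 8-byte words
   sits in the three most significant bytes, so the shift amount in either
   branch is (UNITS_PER_WORD - size) * BITS_PER_UNIT = (8 - 3) * 8 = 40
   bits.  */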
3111 
3112 /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
3113    parameter.  Get it there.  Perform all ABI specified conversions.  */
3114 
3115 static void
3116 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3117 		       struct assign_parm_data_one *data)
3118 {
3119   rtx parmreg, validated_mem;
3120   rtx equiv_stack_parm;
3121   machine_mode promoted_nominal_mode;
3122   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3123   bool did_conversion = false;
3124   bool need_conversion, moved;
3125   rtx rtl;
3126 
3127   /* Store the parm in a pseudoregister during the function, but we may
3128      need to do it in a wider mode.  Using 2 here makes the result
3129      consistent with promote_decl_mode and thus expand_expr_real_1.  */
3130   promoted_nominal_mode
3131     = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3132 			     TREE_TYPE (current_function_decl), 2);
3133 
3134   parmreg = gen_reg_rtx (promoted_nominal_mode);
3135   if (!DECL_ARTIFICIAL (parm))
3136     mark_user_reg (parmreg);
3137 
3138   /* If this was an item that we received a pointer to,
3139      set rtl appropriately.  */
3140   if (data->passed_pointer)
3141     {
3142       rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3143       set_mem_attributes (rtl, parm, 1);
3144     }
3145   else
3146     rtl = parmreg;
3147 
3148   assign_parm_remove_parallels (data);
3149 
3150   /* Copy the value into the register, thus bridging between
3151      assign_parm_find_data_types and expand_expr_real_1.  */
3152 
3153   equiv_stack_parm = data->stack_parm;
3154   validated_mem = validize_mem (copy_rtx (data->entry_parm));
3155 
3156   need_conversion = (data->nominal_mode != data->passed_mode
3157 		     || promoted_nominal_mode != data->promoted_mode);
3158   moved = false;
3159 
3160   if (need_conversion
3161       && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3162       && data->nominal_mode == data->passed_mode
3163       && data->nominal_mode == GET_MODE (data->entry_parm))
3164     {
3165       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3166 	 mode, by the caller.  We now have to convert it to
3167 	 NOMINAL_MODE, if different.  However, PARMREG may be in
3168 	 a different mode than NOMINAL_MODE if it is being stored
3169 	 promoted.
3170 
3171 	 If ENTRY_PARM is a hard register, it might be in a register
3172 	 not valid for operating in its mode (e.g., an odd-numbered
3173 	 register for a DFmode).  In that case, moves are the only
3174 	 thing valid, so we can't do a convert from there.  This
3175 	 occurs when the calling sequence allows such misaligned
3176 	 usages.
3177 
3178 	 In addition, the conversion may involve a call, which could
3179 	 clobber parameters which haven't been copied to pseudo
3180 	 registers yet.
3181 
3182 	 First, we try to emit an insn which performs the necessary
3183 	 conversion.  We verify that this insn does not clobber any
3184 	 hard registers.  */
3185 
3186       enum insn_code icode;
3187       rtx op0, op1;
3188 
3189       icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3190 			    unsignedp);
3191 
3192       op0 = parmreg;
3193       op1 = validated_mem;
3194       if (icode != CODE_FOR_nothing
3195 	  && insn_operand_matches (icode, 0, op0)
3196 	  && insn_operand_matches (icode, 1, op1))
3197 	{
3198 	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3199 	  rtx_insn *insn, *insns;
3200 	  rtx t = op1;
3201 	  HARD_REG_SET hardregs;
3202 
3203 	  start_sequence ();
3204 	  /* If op1 is a hard register that is likely spilled, first
3205 	     force it into a pseudo, otherwise the combiner might extend
3206 	     its lifetime too much.  */
3207 	  if (GET_CODE (t) == SUBREG)
3208 	    t = SUBREG_REG (t);
3209 	  if (REG_P (t)
3210 	      && HARD_REGISTER_P (t)
3211 	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3212 	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3213 	    {
3214 	      t = gen_reg_rtx (GET_MODE (op1));
3215 	      emit_move_insn (t, op1);
3216 	    }
3217 	  else
3218 	    t = op1;
3219 	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3220 					   data->passed_mode, unsignedp);
3221 	  emit_insn (pat);
3222 	  insns = get_insns ();
3223 
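	  /* If any insn in the sequence sets a hard register, the
	     conversion could clobber parameters that still live in hard
	     registers, so clear MOVED and fall back to the deferred
	     conversion path below.  */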
3224 	  moved = true;
3225 	  CLEAR_HARD_REG_SET (hardregs);
3226 	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3227 	    {
3228 	      if (INSN_P (insn))
3229 		note_stores (PATTERN (insn), record_hard_reg_sets,
3230 			     &hardregs);
3231 	      if (!hard_reg_set_empty_p (hardregs))
3232 		moved = false;
3233 	    }
3234 
3235 	  end_sequence ();
3236 
3237 	  if (moved)
3238 	    {
3239 	      emit_insn (insns);
3240 	      if (equiv_stack_parm != NULL_RTX)
3241 		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3242 						  equiv_stack_parm);
3243 	    }
3244 	}
3245     }
3246 
3247   if (moved)
3248     /* Nothing to do.  */
3249     ;
3250   else if (need_conversion)
3251     {
3252       /* We did not have an insn to convert directly, or the sequence
3253 	 generated appeared unsafe.  We must first copy the parm to a
3254 	 pseudo reg, and save the conversion until after all
3255 	 parameters have been moved.  */
3256 
3257       int save_tree_used;
3258       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3259 
3260       emit_move_insn (tempreg, validated_mem);
3261 
3262       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3263       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3264 
3265       if (GET_CODE (tempreg) == SUBREG
3266 	  && GET_MODE (tempreg) == data->nominal_mode
3267 	  && REG_P (SUBREG_REG (tempreg))
3268 	  && data->nominal_mode == data->passed_mode
3269 	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3270 	  && GET_MODE_SIZE (GET_MODE (tempreg))
3271 	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3272 	{
3273 	  /* The argument is already sign/zero extended, so note it
3274 	     into the subreg.  */
3275 	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3276 	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
3277 	}
3278 
3279       /* TREE_USED gets set erroneously during expand_assignment.  */
3280       save_tree_used = TREE_USED (parm);
3281       SET_DECL_RTL (parm, rtl);
3282       expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3283       SET_DECL_RTL (parm, NULL_RTX);
3284       TREE_USED (parm) = save_tree_used;
3285       all->first_conversion_insn = get_insns ();
3286       all->last_conversion_insn = get_last_insn ();
3287       end_sequence ();
3288 
3289       did_conversion = true;
3290     }
3291   else
3292     emit_move_insn (parmreg, validated_mem);
3293 
3294   /* If we were passed a pointer but the actual value can safely live
3295      in a register, retrieve it and use it directly.  */
3296   if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3297     {
3298       /* We can't use nominal_mode, because it will have been set to
3299 	 Pmode above.  We must use the actual mode of the parm.  */
3300       if (use_register_for_decl (parm))
3301 	{
3302 	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3303 	  mark_user_reg (parmreg);
3304 	}
3305       else
3306 	{
3307 	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3308 					    TYPE_MODE (TREE_TYPE (parm)),
3309 					    TYPE_ALIGN (TREE_TYPE (parm)));
3310 	  parmreg
3311 	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3312 				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3313 				  align);
3314 	  set_mem_attributes (parmreg, parm, 1);
3315 	}
3316 
3317       /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3318 	 the debug info in case it is not legitimate.  */
3319       if (GET_MODE (parmreg) != GET_MODE (rtl))
3320 	{
3321 	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3322 	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3323 
3324 	  push_to_sequence2 (all->first_conversion_insn,
3325 			     all->last_conversion_insn);
3326 	  emit_move_insn (tempreg, rtl);
3327 	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3328 	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3329 			  tempreg);
3330 	  all->first_conversion_insn = get_insns ();
3331 	  all->last_conversion_insn = get_last_insn ();
3332 	  end_sequence ();
3333 
3334 	  did_conversion = true;
3335 	}
3336       else
3337 	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3338 
3339       rtl = parmreg;
3340 
3341       /* STACK_PARM is the pointer, not the parm, and PARMREG is
3342 	 now the parm.  */
3343       data->stack_parm = NULL;
3344     }
3345 
3346   set_parm_rtl (parm, rtl);
3347 
3348   /* Mark the register as eliminable if we did no conversion and it was
3349      copied from memory at a fixed offset, and the arg pointer was not
3350      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
3351      offset formed an invalid address, such memory-equivalences as we
3352      make here would screw up life analysis for it.  */
3353   if (data->nominal_mode == data->passed_mode
3354       && !did_conversion
3355       && data->stack_parm != 0
3356       && MEM_P (data->stack_parm)
3357       && data->locate.offset.var == 0
3358       && reg_mentioned_p (virtual_incoming_args_rtx,
3359 			  XEXP (data->stack_parm, 0)))
3360     {
3361       rtx_insn *linsn = get_last_insn ();
3362       rtx_insn *sinsn;
3363       rtx set;
3364 
3365       /* Mark complex types separately.  */
3366       if (GET_CODE (parmreg) == CONCAT)
3367 	{
3368 	  machine_mode submode
3369 	    = GET_MODE_INNER (GET_MODE (parmreg));
3370 	  int regnor = REGNO (XEXP (parmreg, 0));
3371 	  int regnoi = REGNO (XEXP (parmreg, 1));
3372 	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3373 	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
3374 					  GET_MODE_SIZE (submode));
3375 
3376 	  /* Scan backwards for the set of the real and
3377 	     imaginary parts.  */
3378 	  for (sinsn = linsn; sinsn != 0;
3379 	       sinsn = prev_nonnote_insn (sinsn))
3380 	    {
3381 	      set = single_set (sinsn);
3382 	      if (set == 0)
3383 		continue;
3384 
3385 	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
3386 		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3387 	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
3388 		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3389 	    }
3390 	}
3391       else
3392 	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3393     }
3394 
3395   /* For pointer data type, suggest pointer register.  */
3396   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3397     mark_reg_pointer (parmreg,
3398 		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3399 }
3400 
3401 /* A subroutine of assign_parms.  Allocate stack space to hold the current
3402    parameter.  Get it there.  Perform all ABI specified conversions.  */
3403 
3404 static void
3405 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3406 		         struct assign_parm_data_one *data)
3407 {
3408   /* Value must be stored in the stack slot STACK_PARM during function
3409      execution.  */
3410   bool to_conversion = false;
3411 
3412   assign_parm_remove_parallels (data);
3413 
3414   if (data->promoted_mode != data->nominal_mode)
3415     {
3416       /* Conversion is required.  */
3417       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3418 
3419       emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3420 
3421       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3422       to_conversion = true;
3423 
3424       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3425 					  TYPE_UNSIGNED (TREE_TYPE (parm)));
3426 
3427       if (data->stack_parm)
3428 	{
3429 	  int offset = subreg_lowpart_offset (data->nominal_mode,
3430 					      GET_MODE (data->stack_parm));
3431 	  /* ??? This may need a big-endian conversion on sparc64.  */
3432 	  data->stack_parm
3433 	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
3434 	  if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3435 	    set_mem_offset (data->stack_parm,
3436 			    MEM_OFFSET (data->stack_parm) + offset);
3437 	}
3438     }
3439 
3440   if (data->entry_parm != data->stack_parm)
3441     {
3442       rtx src, dest;
3443 
3444       if (data->stack_parm == 0)
3445 	{
3446 	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3447 					    GET_MODE (data->entry_parm),
3448 					    TYPE_ALIGN (data->passed_type));
3449 	  data->stack_parm
3450 	    = assign_stack_local (GET_MODE (data->entry_parm),
3451 				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3452 				  align);
3453 	  set_mem_attributes (data->stack_parm, parm, 1);
3454 	}
3455 
3456       dest = validize_mem (copy_rtx (data->stack_parm));
3457       src = validize_mem (copy_rtx (data->entry_parm));
3458 
3459       if (MEM_P (src))
3460 	{
3461 	  /* Use a block move to handle potentially misaligned entry_parm.  */
3462 	  if (!to_conversion)
3463 	    push_to_sequence2 (all->first_conversion_insn,
3464 			       all->last_conversion_insn);
3465 	  to_conversion = true;
3466 
3467 	  emit_block_move (dest, src,
3468 			   GEN_INT (int_size_in_bytes (data->passed_type)),
3469 			   BLOCK_OP_NORMAL);
3470 	}
3471       else
3472 	emit_move_insn (dest, src);
3473     }
3474 
3475   if (to_conversion)
3476     {
3477       all->first_conversion_insn = get_insns ();
3478       all->last_conversion_insn = get_last_insn ();
3479       end_sequence ();
3480     }
3481 
3482   set_parm_rtl (parm, data->stack_parm);
3483 }
3484 
3485 /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
3486    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
3487 
3488 static void
3489 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3490 			      vec<tree> fnargs)
3491 {
3492   tree parm;
3493   tree orig_fnargs = all->orig_fnargs;
3494   unsigned i = 0;
3495 
3496   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3497     {
3498       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3499 	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3500 	{
3501 	  rtx tmp, real, imag;
3502 	  machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3503 
3504 	  real = DECL_RTL (fnargs[i]);
3505 	  imag = DECL_RTL (fnargs[i + 1]);
3506 	  if (inner != GET_MODE (real))
3507 	    {
3508 	      real = gen_lowpart_SUBREG (inner, real);
3509 	      imag = gen_lowpart_SUBREG (inner, imag);
3510 	    }
3511 
3512 	  if (TREE_ADDRESSABLE (parm))
3513 	    {
3514 	      rtx rmem, imem;
3515 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3516 	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3517 						DECL_MODE (parm),
3518 						TYPE_ALIGN (TREE_TYPE (parm)));
3519 
3520 	      /* split_complex_arg put the real and imag parts in
3521 		 pseudos.  Move them to memory.  */
3522 	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
3523 	      set_mem_attributes (tmp, parm, 1);
3524 	      rmem = adjust_address_nv (tmp, inner, 0);
3525 	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3526 	      push_to_sequence2 (all->first_conversion_insn,
3527 				 all->last_conversion_insn);
3528 	      emit_move_insn (rmem, real);
3529 	      emit_move_insn (imem, imag);
3530 	      all->first_conversion_insn = get_insns ();
3531 	      all->last_conversion_insn = get_last_insn ();
3532 	      end_sequence ();
3533 	    }
3534 	  else
3535 	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3536 	  set_parm_rtl (parm, tmp);
3537 
3538 	  real = DECL_INCOMING_RTL (fnargs[i]);
3539 	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3540 	  if (inner != GET_MODE (real))
3541 	    {
3542 	      real = gen_lowpart_SUBREG (inner, real);
3543 	      imag = gen_lowpart_SUBREG (inner, imag);
3544 	    }
3545 	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3546 	  set_decl_incoming_rtl (parm, tmp, false);
3547 	  i++;
3548 	}
3549     }
3550 }
3551 
3552 /* Load bounds of PARM from bounds table.  */
3553 static void
3554 assign_parm_load_bounds (struct assign_parm_data_one *data,
3555 			 tree parm,
3556 			 rtx entry,
3557 			 unsigned bound_no)
3558 {
3559   bitmap_iterator bi;
3560   unsigned i, offs = 0;
3561   int bnd_no = -1;
3562   rtx slot = NULL, ptr = NULL;
3563 
3564   if (parm)
3565     {
3566       bitmap slots;
3567       bitmap_obstack_initialize (NULL);
3568       slots = BITMAP_ALLOC (NULL);
3569       chkp_find_bound_slots (TREE_TYPE (parm), slots);
3570       EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3571 	{
3572 	  if (bound_no)
3573 	    bound_no--;
3574 	  else
3575 	    {
3576 	      bnd_no = i;
3577 	      break;
3578 	    }
3579 	}
3580       BITMAP_FREE (slots);
3581       bitmap_obstack_release (NULL);
3582     }
3583 
3584   /* We may have bounds not associated with any pointer.  */
3585   if (bnd_no != -1)
3586     offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
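  /* For example, with 64-bit pointers the third bound slot
     (bnd_no == 2) corresponds to a byte offset of 16.  */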
3587 
3588   /* Find associated pointer.  */
3589   if (bnd_no == -1)
3590     {
3591 	      /* If the bounds are not associated with any pointer,
3592 		 then they are passed in a register or special slot.  */
3593       gcc_assert (data->entry_parm);
3594       ptr = const0_rtx;
3595     }
3596   else if (MEM_P (entry))
3597     slot = adjust_address (entry, Pmode, offs);
3598   else if (REG_P (entry))
3599     ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3600   else if (GET_CODE (entry) == PARALLEL)
3601     ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3602   else
3603     gcc_unreachable ();
3604   data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3605 							data->entry_parm);
3606 }
3607 
3608 /* Assign RTL expressions to the function's bounds parameters BNDARGS.  */
3609 
3610 static void
3611 assign_bounds (vec<bounds_parm_data> &bndargs,
3612 	       struct assign_parm_data_all &all,
3613 	       bool assign_regs, bool assign_special,
3614 	       bool assign_bt)
3615 {
3616   unsigned i, pass;
3617   bounds_parm_data *pbdata;
3618 
3619   if (!bndargs.exists ())
3620     return;
3621 
3622   /* We make several passes to store input bounds.  First we handle
3623      bounds passed in registers.  After that we load bounds passed in
3624      special slots.  Finally we load bounds from the Bounds Table.  */
3625   for (pass = 0; pass < 3; pass++)
3626     FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3627       {
3628 	/* Pass 0 => regs only.  */
3629 	if (pass == 0
3630 	    && (!assign_regs
3631 		|| (!pbdata->parm_data.entry_parm
3632 		   || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
3633 	  continue;
3634 	/* Pass 1 => slots only.  */
3635 	else if (pass == 1
3636 		 && (!assign_special
3637 		     || (!pbdata->parm_data.entry_parm
3638 			 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
3639 	  continue;
3640 	/* Pass 2 => BT only.  */
3641 	else if (pass == 2
3642 		 && (!assign_bt
3643 		     || pbdata->parm_data.entry_parm))
3644 	  continue;
3645 
3646 	if (!pbdata->parm_data.entry_parm
3647 	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3648 	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3649 				   pbdata->ptr_entry, pbdata->bound_no);
3650 
3651 	set_decl_incoming_rtl (pbdata->bounds_parm,
3652 			       pbdata->parm_data.entry_parm, false);
3653 
3654 	if (assign_parm_setup_block_p (&pbdata->parm_data))
3655 	  assign_parm_setup_block (&all, pbdata->bounds_parm,
3656 				   &pbdata->parm_data);
3657 	else if (pbdata->parm_data.passed_pointer
3658 		 || use_register_for_decl (pbdata->bounds_parm))
3659 	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
3660 				 &pbdata->parm_data);
3661 	else
3662 	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
3663 				   &pbdata->parm_data);
3664       }
3665 }
3666 
3667 /* Assign RTL expressions to the function's parameters.  This may involve
3668    copying them into registers and using those registers as the DECL_RTL.  */
3669 
3670 static void
3671 assign_parms (tree fndecl)
3672 {
3673   struct assign_parm_data_all all;
3674   tree parm;
3675   vec<tree> fnargs;
3676   unsigned i, bound_no = 0;
3677   tree last_arg = NULL;
3678   rtx last_arg_entry = NULL;
3679   vec<bounds_parm_data> bndargs = vNULL;
3680   bounds_parm_data bdata;
3681 
3682   crtl->args.internal_arg_pointer
3683     = targetm.calls.internal_arg_pointer ();
3684 
3685   assign_parms_initialize_all (&all);
3686   fnargs = assign_parms_augmented_arg_list (&all);
3687 
3688   FOR_EACH_VEC_ELT (fnargs, i, parm)
3689     {
3690       struct assign_parm_data_one data;
3691 
3692       /* Extract the type of PARM; adjust it according to ABI.  */
3693       assign_parm_find_data_types (&all, parm, &data);
3694 
3695       /* Early out for errors and void parameters.  */
3696       if (data.passed_mode == VOIDmode)
3697 	{
3698 	  SET_DECL_RTL (parm, const0_rtx);
3699 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3700 	  continue;
3701 	}
3702 
3703       /* Estimate stack alignment from parameter alignment.  */
3704       if (SUPPORTS_STACK_ALIGNMENT)
3705         {
3706           unsigned int align
3707 	    = targetm.calls.function_arg_boundary (data.promoted_mode,
3708 						   data.passed_type);
3709 	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3710 				     align);
3711 	  if (TYPE_ALIGN (data.nominal_type) > align)
3712 	    align = MINIMUM_ALIGNMENT (data.nominal_type,
3713 				       TYPE_MODE (data.nominal_type),
3714 				       TYPE_ALIGN (data.nominal_type));
3715 	  if (crtl->stack_alignment_estimated < align)
3716 	    {
3717 	      gcc_assert (!crtl->stack_realign_processed);
3718 	      crtl->stack_alignment_estimated = align;
3719 	    }
3720 	}
3721 
3722       /* Find out where the parameter arrives in this function.  */
3723       assign_parm_find_entry_rtl (&all, &data);
3724 
3725       /* Find out where stack space for this parameter might be.  */
3726       if (assign_parm_is_stack_parm (&all, &data))
3727 	{
3728 	  assign_parm_find_stack_rtl (parm, &data);
3729 	  assign_parm_adjust_entry_rtl (&data);
3730 	}
3731       if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3732 	{
3733 	  /* Remember where the last non-bounds arg was passed in case
3734 	     we have to load associated bounds for it from the Bounds
3735 	     Table.  */
3736 	  last_arg = parm;
3737 	  last_arg_entry = data.entry_parm;
3738 	  bound_no = 0;
3739 	}
3740       /* Record permanently how this parm was passed.  */
3741       if (data.passed_pointer)
3742 	{
3743 	  rtx incoming_rtl
3744 	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3745 			   data.entry_parm);
3746 	  set_decl_incoming_rtl (parm, incoming_rtl, true);
3747 	}
3748       else
3749 	set_decl_incoming_rtl (parm, data.entry_parm, false);
3750 
3751       assign_parm_adjust_stack_rtl (&data);
3752 
3753       /* Bounds should be loaded in a particular order to
3754 	 have registers allocated correctly.  Collect info about
3755 	 input bounds and load them later.  */
3756       if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3757 	{
3758 	  /* Expect bounds in instrumented functions only.  */
3759 	  gcc_assert (chkp_function_instrumented_p (fndecl));
3760 
3761 	  bdata.parm_data = data;
3762 	  bdata.bounds_parm = parm;
3763 	  bdata.ptr_parm = last_arg;
3764 	  bdata.ptr_entry = last_arg_entry;
3765 	  bdata.bound_no = bound_no;
3766 	  bndargs.safe_push (bdata);
3767 	}
3768       else
3769 	{
3770 	  if (assign_parm_setup_block_p (&data))
3771 	    assign_parm_setup_block (&all, parm, &data);
3772 	  else if (data.passed_pointer || use_register_for_decl (parm))
3773 	    assign_parm_setup_reg (&all, parm, &data);
3774 	  else
3775 	    assign_parm_setup_stack (&all, parm, &data);
3776 	}
3777 
3778       if (cfun->stdarg && !DECL_CHAIN (parm))
3779 	{
3780 	  int pretend_bytes = 0;
3781 
3782 	  assign_parms_setup_varargs (&all, &data, false);
3783 
3784 	  if (chkp_function_instrumented_p (fndecl))
3785 	    {
3786 	      /* We expect this is the last parm.  Otherwise it is wrong
3787 		 to assign bounds right now.  */
3788 	      gcc_assert (i == (fnargs.length () - 1));
3789 	      assign_bounds (bndargs, all, true, false, false);
3790 	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3791 							  data.promoted_mode,
3792 							  data.passed_type,
3793 							  &pretend_bytes,
3794 							  false);
3795 	      assign_bounds (bndargs, all, false, true, true);
3796 	      bndargs.release ();
3797 	    }
3798 	}
3799 
3800       /* Update info on where next arg arrives in registers.  */
3801       targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3802 					  data.passed_type, data.named_arg);
3803 
3804       if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3805 	bound_no++;
3806     }
3807 
3808   assign_bounds (bndargs, all, true, true, true);
3809   bndargs.release ();
3810 
3811   if (targetm.calls.split_complex_arg)
3812     assign_parms_unsplit_complex (&all, fnargs);
3813 
3814   fnargs.release ();
3815 
3816   /* Output all parameter conversion instructions (possibly including calls)
3817      now that all parameters have been copied out of hard registers.  */
3818   emit_insn (all.first_conversion_insn);
3819 
3820   /* Estimate reload stack alignment from scalar return mode.  */
3821   if (SUPPORTS_STACK_ALIGNMENT)
3822     {
3823       if (DECL_RESULT (fndecl))
3824 	{
3825 	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
3826 	  machine_mode mode = TYPE_MODE (type);
3827 
3828 	  if (mode != BLKmode
3829 	      && mode != VOIDmode
3830 	      && !AGGREGATE_TYPE_P (type))
3831 	    {
3832 	      unsigned int align = GET_MODE_ALIGNMENT (mode);
3833 	      if (crtl->stack_alignment_estimated < align)
3834 		{
3835 		  gcc_assert (!crtl->stack_realign_processed);
3836 		  crtl->stack_alignment_estimated = align;
3837 		}
3838 	    }
3839 	}
3840     }
3841 
3842   /* If we are receiving a struct value address as the first argument, set up
3843      the RTL for the function result. As this might require code to convert
3844      the transmitted address to Pmode, we do this here to ensure that possible
3845      preliminary conversions of the address have been emitted already.  */
3846   if (all.function_result_decl)
3847     {
3848       tree result = DECL_RESULT (current_function_decl);
3849       rtx addr = DECL_RTL (all.function_result_decl);
3850       rtx x;
3851 
3852       if (DECL_BY_REFERENCE (result))
3853 	{
3854 	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3855 	  x = addr;
3856 	}
3857       else
3858 	{
3859 	  SET_DECL_VALUE_EXPR (result,
3860 			       build1 (INDIRECT_REF, TREE_TYPE (result),
3861 				       all.function_result_decl));
3862 	  addr = convert_memory_address (Pmode, addr);
3863 	  x = gen_rtx_MEM (DECL_MODE (result), addr);
3864 	  set_mem_attributes (x, result, 1);
3865 	}
3866 
3867       DECL_HAS_VALUE_EXPR_P (result) = 1;
3868 
3869       set_parm_rtl (result, x);
3870     }
3871 
3872   /* We have aligned all the args, so add space for the pretend args.  */
3873   crtl->args.pretend_args_size = all.pretend_args_size;
3874   all.stack_args_size.constant += all.extra_pretend_bytes;
3875   crtl->args.size = all.stack_args_size.constant;
3876 
3877   /* Adjust function incoming argument size for alignment and
3878      minimum length.  */
3879 
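  /* For example, with a 64-bit PARM_BOUNDARY a total of 20 bytes of
     named stack arguments is rounded up to 24 bytes here.  */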
3880   crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3881   crtl->args.size = CEIL_ROUND (crtl->args.size,
3882 					   PARM_BOUNDARY / BITS_PER_UNIT);
3883 
3884   if (ARGS_GROW_DOWNWARD)
3885     {
3886       crtl->args.arg_offset_rtx
3887 	= (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3888 	   : expand_expr (size_diffop (all.stack_args_size.var,
3889 				       size_int (-all.stack_args_size.constant)),
3890 			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
3891     }
3892   else
3893     crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3894 
3895   /* See how many bytes, if any, of its args a function should try to pop
3896      on return.  */
3897 
3898   crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3899 							 TREE_TYPE (fndecl),
3900 							 crtl->args.size);
3901 
3902   /* For a stdarg.h function, save info about
3903      the regs and stack space used by the named args.  */
3904 
3905   crtl->args.info = all.args_so_far_v;
3906 
3907   /* Set the rtx used for the function return value.  Put this in its
3908      own variable so any optimizers that need this information don't have
3909      to include tree.h.  Do this here so it gets done when an inlined
3910      function gets output.  */
3911 
3912   crtl->return_rtx
3913     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3914        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3915 
3916   /* If scalar return value was computed in a pseudo-reg, or was a named
3917      return value that got dumped to the stack, copy that to the hard
3918      return register.  */
3919   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3920     {
3921       tree decl_result = DECL_RESULT (fndecl);
3922       rtx decl_rtl = DECL_RTL (decl_result);
3923 
3924       if (REG_P (decl_rtl)
3925 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3926 	  : DECL_REGISTER (decl_result))
3927 	{
3928 	  rtx real_decl_rtl;
3929 
3930 	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3931 							fndecl, true);
3932 	  if (chkp_function_instrumented_p (fndecl))
3933 	    crtl->return_bnd
3934 	      = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3935 							  fndecl, true);
3936 	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3937 	  /* The delay slot scheduler assumes that crtl->return_rtx
3938 	     holds the hard register containing the return value, not a
3939 	     temporary pseudo.  */
3940 	  crtl->return_rtx = real_decl_rtl;
3941 	}
3942     }
3943 }
3944 
3945 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3946    For all seen types, gimplify their sizes.  */
3947 
3948 static tree
3949 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3950 {
3951   tree t = *tp;
3952 
3953   *walk_subtrees = 0;
3954   if (TYPE_P (t))
3955     {
3956       if (POINTER_TYPE_P (t))
3957 	*walk_subtrees = 1;
3958       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3959 	       && !TYPE_SIZES_GIMPLIFIED (t))
3960 	{
3961 	  gimplify_type_sizes (t, (gimple_seq *) data);
3962 	  *walk_subtrees = 1;
3963 	}
3964     }
3965 
3966   return NULL;
3967 }
3968 
3969 /* Gimplify the parameter list for current_function_decl.  This involves
3970    evaluating SAVE_EXPRs of variable sized parameters and generating code
3971    to implement callee-copies reference parameters.  Returns a sequence of
3972    statements to add to the beginning of the function.  */
3973 
3974 gimple_seq
3975 gimplify_parameters (void)
3976 {
3977   struct assign_parm_data_all all;
3978   tree parm;
3979   gimple_seq stmts = NULL;
3980   vec<tree> fnargs;
3981   unsigned i;
3982 
3983   assign_parms_initialize_all (&all);
3984   fnargs = assign_parms_augmented_arg_list (&all);
3985 
3986   FOR_EACH_VEC_ELT (fnargs, i, parm)
3987     {
3988       struct assign_parm_data_one data;
3989 
3990       /* Extract the type of PARM; adjust it according to ABI.  */
3991       assign_parm_find_data_types (&all, parm, &data);
3992 
3993       /* Early out for errors and void parameters.  */
3994       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3995 	continue;
3996 
3997       /* Update info on where next arg arrives in registers.  */
3998       targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3999 					  data.passed_type, data.named_arg);
4000 
4001       /* ??? Once upon a time variable_size stuffed parameter list
4002 	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
4003 	 turned out to be less than manageable in the gimple world.
4004 	 Now we have to hunt them down ourselves.  */
4005       walk_tree_without_duplicates (&data.passed_type,
4006 				    gimplify_parm_type, &stmts);
4007 
4008       if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4009 	{
4010 	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
4011 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
4012 	}
4013 
4014       if (data.passed_pointer)
4015 	{
4016           tree type = TREE_TYPE (data.passed_type);
4017 	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4018 				       type, data.named_arg))
4019 	    {
4020 	      tree local, t;
4021 
4022 	      /* For constant-sized objects, this is trivial; for
4023 		 variable-sized objects, we have to play games.  */
4024 	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
4025 		  && !(flag_stack_check == GENERIC_STACK_CHECK
4026 		       && compare_tree_int (DECL_SIZE_UNIT (parm),
4027 					    STACK_CHECK_MAX_VAR_SIZE) > 0))
4028 		{
4029 		  local = create_tmp_var (type, get_name (parm));
4030 		  DECL_IGNORED_P (local) = 0;
4031 		  /* If PARM was addressable, move that flag over
4032 		     to the local copy, as its address will be taken,
4033 		     not the PARM's.  Keep the PARM address-taken as well,
4034 		     since we'll query that flag during gimplification.  */
4035 		  if (TREE_ADDRESSABLE (parm))
4036 		    TREE_ADDRESSABLE (local) = 1;
4037 		  else if (TREE_CODE (type) == COMPLEX_TYPE
4038 			   || TREE_CODE (type) == VECTOR_TYPE)
4039 		    DECL_GIMPLE_REG_P (local) = 1;
4040 		}
4041 	      else
4042 		{
4043 		  tree ptr_type, addr;
4044 
4045 		  ptr_type = build_pointer_type (type);
4046 		  addr = create_tmp_reg (ptr_type, get_name (parm));
4047 		  DECL_IGNORED_P (addr) = 0;
4048 		  local = build_fold_indirect_ref (addr);
4049 
4050 		  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4051 		  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
4052 				       size_int (DECL_ALIGN (parm)));
4053 
4054 		  /* The call has been built for a variable-sized object.  */
4055 		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
4056 		  t = fold_convert (ptr_type, t);
4057 		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4058 		  gimplify_and_add (t, &stmts);
4059 		}
4060 
4061 	      gimplify_assign (local, parm, &stmts);
4062 
4063 	      SET_DECL_VALUE_EXPR (parm, local);
4064 	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
4065 	    }
4066 	}
4067     }
4068 
4069   fnargs.release ();
4070 
4071   return stmts;
4072 }
4073 
4074 /* Compute the size and offset from the start of the stacked arguments for a
4075    parm passed in mode PASSED_MODE and with type TYPE.
4076 
4077    INITIAL_OFFSET_PTR points to the current offset into the stacked
4078    arguments.
4079 
4080    The starting offset and size for this parm are returned in
4081    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
4082    nonzero, the offset is that of the stack slot, which is returned in
4083    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
4084    padding required from the initial offset ptr to the stack slot.
4085 
4086    IN_REGS is nonzero if the argument will be passed in registers.  It will
4087    never be set if REG_PARM_STACK_SPACE is not defined.
4088 
4089    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4090    for arguments which are passed in registers.
4091 
4092    FNDECL is the function in which the argument was defined.
4093 
4094    There are two types of rounding that are done.  The first, controlled by
4095    TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4096    argument list to be aligned to the specific boundary (in bits).  This
4097    rounding affects the initial and starting offsets, but not the argument
4098    size.
4099 
4100    The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4101    optionally rounds the size of the parm to PARM_BOUNDARY.  The
4102    initial offset is not affected by this rounding, while the size always
4103    is and the starting offset may be.  */
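
/* As a hypothetical example: with a 64-bit argument boundary, a 4-byte
   argument whose running offset is 4 has its starting offset rounded up
   to 8 while its size stays 4; if the second rounding applies with a
   32-bit PARM_BOUNDARY, a 3-byte argument keeps its initial offset but
   has its size rounded up to 4 bytes.  */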
4104 
4105 /*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
4106     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4107     callers pass in the total size of args so far as
4108     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
4109 
4110 void
4111 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4112 		     int reg_parm_stack_space, int partial,
4113 		     tree fndecl ATTRIBUTE_UNUSED,
4114 		     struct args_size *initial_offset_ptr,
4115 		     struct locate_and_pad_arg_data *locate)
4116 {
4117   tree sizetree;
4118   enum direction where_pad;
4119   unsigned int boundary, round_boundary;
4120   int part_size_in_regs;
4121 
4122   /* If we have found a stack parm before we reach the end of the
4123      area reserved for registers, skip that area.  */
4124   if (! in_regs)
4125     {
4126       if (reg_parm_stack_space > 0)
4127 	{
4128 	  if (initial_offset_ptr->var)
4129 	    {
4130 	      initial_offset_ptr->var
4131 		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4132 			      ssize_int (reg_parm_stack_space));
4133 	      initial_offset_ptr->constant = 0;
4134 	    }
4135 	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
4136 	    initial_offset_ptr->constant = reg_parm_stack_space;
4137 	}
4138     }
4139 
4140   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4141 
4142   sizetree
4143     = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4144   where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4145   boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4146   round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4147 							      type);
4148   locate->where_pad = where_pad;
4149 
4150   /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
4151   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4152     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4153 
4154   locate->boundary = boundary;
4155 
4156   if (SUPPORTS_STACK_ALIGNMENT)
4157     {
4158       /* stack_alignment_estimated can't change after stack has been
4159 	 realigned.  */
4160       if (crtl->stack_alignment_estimated < boundary)
4161         {
4162           if (!crtl->stack_realign_processed)
4163 	    crtl->stack_alignment_estimated = boundary;
4164 	  else
4165 	    {
4166 	      /* If stack is realigned and stack alignment value
4167 		 hasn't been finalized, it is OK not to increase
4168 		 stack_alignment_estimated.  The bigger alignment
4169 		 requirement is recorded in stack_alignment_needed
4170 		 below.  */
4171 	      gcc_assert (!crtl->stack_realign_finalized
4172 			  && crtl->stack_realign_needed);
4173 	    }
4174 	}
4175     }
4176 
4177   /* Remember if the outgoing parameter requires extra alignment on the
4178      calling function side.  */
4179   if (crtl->stack_alignment_needed < boundary)
4180     crtl->stack_alignment_needed = boundary;
4181   if (crtl->preferred_stack_boundary < boundary)
4182     crtl->preferred_stack_boundary = boundary;
4183 
4184   if (ARGS_GROW_DOWNWARD)
4185     {
4186       locate->slot_offset.constant = -initial_offset_ptr->constant;
4187       if (initial_offset_ptr->var)
4188 	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4189 					      initial_offset_ptr->var);
4190 
4191       {
4192 	tree s2 = sizetree;
4193 	if (where_pad != none
4194 	    && (!tree_fits_uhwi_p (sizetree)
4195 		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4196 	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4197 	SUB_PARM_SIZE (locate->slot_offset, s2);
4198       }
4199 
4200       locate->slot_offset.constant += part_size_in_regs;
4201 
4202       if (!in_regs || reg_parm_stack_space > 0)
4203 	pad_to_arg_alignment (&locate->slot_offset, boundary,
4204 			      &locate->alignment_pad);
4205 
4206       locate->size.constant = (-initial_offset_ptr->constant
4207 			       - locate->slot_offset.constant);
4208       if (initial_offset_ptr->var)
4209 	locate->size.var = size_binop (MINUS_EXPR,
4210 				       size_binop (MINUS_EXPR,
4211 						   ssize_int (0),
4212 						   initial_offset_ptr->var),
4213 				       locate->slot_offset.var);
4214 
4215       /* Pad_below needs the pre-rounded size to know how much to pad
4216 	 below.  */
4217       locate->offset = locate->slot_offset;
4218       if (where_pad == downward)
4219 	pad_below (&locate->offset, passed_mode, sizetree);
4220 
4221     }
4222   else
4223     {
4224       if (!in_regs || reg_parm_stack_space > 0)
4225 	pad_to_arg_alignment (initial_offset_ptr, boundary,
4226 			      &locate->alignment_pad);
4227       locate->slot_offset = *initial_offset_ptr;
4228 
4229 #ifdef PUSH_ROUNDING
4230       if (passed_mode != BLKmode)
4231 	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4232 #endif
4233 
4234       /* Pad_below needs the pre-rounded size to know how much to pad below
4235 	 so this must be done before rounding up.  */
4236       locate->offset = locate->slot_offset;
4237       if (where_pad == downward)
4238 	pad_below (&locate->offset, passed_mode, sizetree);
4239 
4240       if (where_pad != none
4241 	  && (!tree_fits_uhwi_p (sizetree)
4242 	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4243 	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4244 
4245       ADD_PARM_SIZE (locate->size, sizetree);
4246 
4247       locate->size.constant -= part_size_in_regs;
4248     }
4249 
4250 #ifdef FUNCTION_ARG_OFFSET
4251   locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4252 #endif
4253 }
4254 
4255 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4256    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
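
/* For example, when arguments grow upward and STACK_POINTER_OFFSET is 0,
   a constant offset of 20 rounded to a 64-bit (8-byte) boundary becomes
   24; if BOUNDARY also exceeds PARM_BOUNDARY, the 4 bytes of padding are
   recorded in *ALIGNMENT_PAD.  */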
4257 
4258 static void
4259 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4260 		      struct args_size *alignment_pad)
4261 {
4262   tree save_var = NULL_TREE;
4263   HOST_WIDE_INT save_constant = 0;
4264   int boundary_in_bytes = boundary / BITS_PER_UNIT;
4265   HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4266 
4267 #ifdef SPARC_STACK_BOUNDARY_HACK
4268   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4269      the real alignment of %sp.  However, when it does this, the
4270      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
4271   if (SPARC_STACK_BOUNDARY_HACK)
4272     sp_offset = 0;
4273 #endif
4274 
4275   if (boundary > PARM_BOUNDARY)
4276     {
4277       save_var = offset_ptr->var;
4278       save_constant = offset_ptr->constant;
4279     }
4280 
4281   alignment_pad->var = NULL_TREE;
4282   alignment_pad->constant = 0;
4283 
4284   if (boundary > BITS_PER_UNIT)
4285     {
4286       if (offset_ptr->var)
4287 	{
4288 	  tree sp_offset_tree = ssize_int (sp_offset);
4289 	  tree offset = size_binop (PLUS_EXPR,
4290 				    ARGS_SIZE_TREE (*offset_ptr),
4291 				    sp_offset_tree);
4292 	  tree rounded;
4293 	  if (ARGS_GROW_DOWNWARD)
4294 	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
4295 	  else
4296 	    rounded = round_up   (offset, boundary / BITS_PER_UNIT);
4297 
4298 	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4299 	  /* ARGS_SIZE_TREE includes constant term.  */
4300 	  offset_ptr->constant = 0;
4301 	  if (boundary > PARM_BOUNDARY)
4302 	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4303 					     save_var);
4304 	}
4305       else
4306 	{
4307 	  offset_ptr->constant = -sp_offset +
4308 	    (ARGS_GROW_DOWNWARD
4309 	    ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
4310 	    : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
4311 
4312 	    if (boundary > PARM_BOUNDARY)
4313 	      alignment_pad->constant = offset_ptr->constant - save_constant;
4314 	}
4315     }
4316 }
4317 
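/* Bump *OFFSET_PTR past the padding that goes below a parm of mode
   PASSED_MODE and size SIZETREE when its size is rounded up to
   PARM_BOUNDARY (used for downward padding).  For example, a 1-byte
   argument with a 32-bit PARM_BOUNDARY gets 3 bytes of padding below
   it.  */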
4318 static void
4319 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4320 {
4321   if (passed_mode != BLKmode)
4322     {
4323       if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4324 	offset_ptr->constant
4325 	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4326 	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4327 	      - GET_MODE_SIZE (passed_mode));
4328     }
4329   else
4330     {
4331       if (TREE_CODE (sizetree) != INTEGER_CST
4332 	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4333 	{
4334 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
4335 	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4336 	  /* Add it in.  */
4337 	  ADD_PARM_SIZE (*offset_ptr, s2);
4338 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
4339 	}
4340     }
4341 }
4342 
4343 
4344 /* True if register REGNO was alive at a place where `setjmp' was
4345    called and was set more than once or is an argument.  Such regs may
4346    be clobbered by `longjmp'.  */
4347 
4348 static bool
4349 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4350 {
4351   /* There appear to be cases where some local vars never reach the
4352      backend but have bogus regnos.  */
4353   if (regno >= max_reg_num ())
4354     return false;
4355 
4356   return ((REG_N_SETS (regno) > 1
4357 	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4358 			       regno))
4359 	  && REGNO_REG_SET_P (setjmp_crosses, regno));
4360 }
4361 
4362 /* Walk the tree of blocks describing the binding levels within a
4363    function and warn about variables that might be killed by setjmp or
4364    vfork.  This is done after calling flow analysis and before register
4365    allocation, since register allocation will map the pseudo-regs to
4366    hard regs.  */
4367 
4368 static void
4369 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4370 {
4371   tree decl, sub;
4372 
4373   for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4374     {
4375       if (TREE_CODE (decl) == VAR_DECL
4376 	  && DECL_RTL_SET_P (decl)
4377 	  && REG_P (DECL_RTL (decl))
4378 	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4379 	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4380                  " %<longjmp%> or %<vfork%>", decl);
4381     }
4382 
4383   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4384     setjmp_vars_warning (setjmp_crosses, sub);
4385 }
4386 
4387 /* Do the appropriate part of setjmp_vars_warning
4388    but for arguments instead of local variables.  */
4389 
4390 static void
4391 setjmp_args_warning (bitmap setjmp_crosses)
4392 {
4393   tree decl;
4394   for (decl = DECL_ARGUMENTS (current_function_decl);
4395        decl; decl = DECL_CHAIN (decl))
4396     if (DECL_RTL (decl) != 0
4397 	&& REG_P (DECL_RTL (decl))
4398 	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4399       warning (OPT_Wclobbered,
4400                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4401 	       decl);
4402 }
4403 
4404 /* Generate warning messages for variables live across setjmp.  */
4405 
4406 void
4407 generate_setjmp_warnings (void)
4408 {
4409   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4410 
4411   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4412       || bitmap_empty_p (setjmp_crosses))
4413     return;
4414 
4415   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4416   setjmp_args_warning (setjmp_crosses);
4417 }
4418 
4419 
4420 /* Reverse the order of elements in the fragment chain T of blocks,
4421    and return the new head of the chain (old last element).
4422    In addition to that clear BLOCK_SAME_RANGE flags when needed
4423    and adjust BLOCK_SUPERCONTEXT from the super fragment to
4424    its super fragment origin.  */
4425 
4426 static tree
4427 block_fragments_nreverse (tree t)
4428 {
4429   tree prev = 0, block, next, prev_super = 0;
4430   tree super = BLOCK_SUPERCONTEXT (t);
4431   if (BLOCK_FRAGMENT_ORIGIN (super))
4432     super = BLOCK_FRAGMENT_ORIGIN (super);
4433   for (block = t; block; block = next)
4434     {
4435       next = BLOCK_FRAGMENT_CHAIN (block);
4436       BLOCK_FRAGMENT_CHAIN (block) = prev;
4437       if ((prev && !BLOCK_SAME_RANGE (prev))
4438 	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4439 	      != prev_super))
4440 	BLOCK_SAME_RANGE (block) = 0;
4441       prev_super = BLOCK_SUPERCONTEXT (block);
4442       BLOCK_SUPERCONTEXT (block) = super;
4443       prev = block;
4444     }
4445   t = BLOCK_FRAGMENT_ORIGIN (t);
4446   if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4447       != prev_super)
4448     BLOCK_SAME_RANGE (t) = 0;
4449   BLOCK_SUPERCONTEXT (t) = super;
4450   return prev;
4451 }
4452 
4453 /* Reverse the order of elements in the chain T of blocks,
4454    and return the new head of the chain (old last element).
4455    Also do the same on subblocks and reverse the order of elements
4456    in BLOCK_FRAGMENT_CHAIN as well.  */
4457 
4458 static tree
4459 blocks_nreverse_all (tree t)
4460 {
4461   tree prev = 0, block, next;
4462   for (block = t; block; block = next)
4463     {
4464       next = BLOCK_CHAIN (block);
4465       BLOCK_CHAIN (block) = prev;
4466       if (BLOCK_FRAGMENT_CHAIN (block)
4467 	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4468 	{
4469 	  BLOCK_FRAGMENT_CHAIN (block)
4470 	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4471 	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4472 	    BLOCK_SAME_RANGE (block) = 0;
4473 	}
4474       BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4475       prev = block;
4476     }
4477   return prev;
4478 }
4479 
4480 
4481 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4482    and create duplicate blocks.  */
4483 /* ??? Need an option to either create block fragments or to create
4484    abstract origin duplicates of a source block.  It really depends
4485    on what optimization has been performed.  */
4486 
4487 void
4488 reorder_blocks (void)
4489 {
4490   tree block = DECL_INITIAL (current_function_decl);
4491 
4492   if (block == NULL_TREE)
4493     return;
4494 
4495   auto_vec<tree, 10> block_stack;
4496 
4497   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
4498   clear_block_marks (block);
4499 
4500   /* Prune the old trees away, so that they don't get in the way.  */
4501   BLOCK_SUBBLOCKS (block) = NULL_TREE;
4502   BLOCK_CHAIN (block) = NULL_TREE;
4503 
4504   /* Recreate the block tree from the note nesting.  */
4505   reorder_blocks_1 (get_insns (), block, &block_stack);
4506   BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4507 }
4508 
4509 /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
4510 
4511 void
4512 clear_block_marks (tree block)
4513 {
4514   while (block)
4515     {
4516       TREE_ASM_WRITTEN (block) = 0;
4517       clear_block_marks (BLOCK_SUBBLOCKS (block));
4518       block = BLOCK_CHAIN (block);
4519     }
4520 }
4521 
4522 static void
4523 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4524 		  vec<tree> *p_block_stack)
4525 {
4526   rtx_insn *insn;
4527   tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4528 
4529   for (insn = insns; insn; insn = NEXT_INSN (insn))
4530     {
4531       if (NOTE_P (insn))
4532 	{
4533 	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4534 	    {
4535 	      tree block = NOTE_BLOCK (insn);
4536 	      tree origin;
4537 
4538 	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4539 	      origin = block;
4540 
4541 	      if (prev_end)
4542 		BLOCK_SAME_RANGE (prev_end) = 0;
4543 	      prev_end = NULL_TREE;
4544 
4545 	      /* If we have seen this block before, that means it now
4546 		 spans multiple address regions.  Create a new fragment.  */
4547 	      if (TREE_ASM_WRITTEN (block))
4548 		{
4549 		  tree new_block = copy_node (block);
4550 
4551 		  BLOCK_SAME_RANGE (new_block) = 0;
4552 		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4553 		  BLOCK_FRAGMENT_CHAIN (new_block)
4554 		    = BLOCK_FRAGMENT_CHAIN (origin);
4555 		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4556 
4557 		  NOTE_BLOCK (insn) = new_block;
4558 		  block = new_block;
4559 		}
4560 
4561 	      if (prev_beg == current_block && prev_beg)
4562 		BLOCK_SAME_RANGE (block) = 1;
4563 
4564 	      prev_beg = origin;
4565 
4566 	      BLOCK_SUBBLOCKS (block) = 0;
4567 	      TREE_ASM_WRITTEN (block) = 1;
4568 	      /* When there's only one block for the entire function,
4569 		 current_block == block and we mustn't do this; it
4570 		 would cause infinite recursion.  */
4571 	      if (block != current_block)
4572 		{
4573 		  tree super;
4574 		  if (block != origin)
4575 		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4576 				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4577 								      (origin))
4578 				   == current_block);
4579 		  if (p_block_stack->is_empty ())
4580 		    super = current_block;
4581 		  else
4582 		    {
4583 		      super = p_block_stack->last ();
4584 		      gcc_assert (super == current_block
4585 				  || BLOCK_FRAGMENT_ORIGIN (super)
4586 				     == current_block);
4587 		    }
4588 		  BLOCK_SUPERCONTEXT (block) = super;
4589 		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4590 		  BLOCK_SUBBLOCKS (current_block) = block;
4591 		  current_block = origin;
4592 		}
4593 	      p_block_stack->safe_push (block);
4594 	    }
4595 	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4596 	    {
4597 	      NOTE_BLOCK (insn) = p_block_stack->pop ();
4598 	      current_block = BLOCK_SUPERCONTEXT (current_block);
4599 	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
4600 		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4601 	      prev_beg = NULL_TREE;
4602 	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4603 			 ? NOTE_BLOCK (insn) : NULL_TREE;
4604 	    }
4605 	}
4606       else
4607 	{
4608 	  prev_beg = NULL_TREE;
4609 	  if (prev_end)
4610 	    BLOCK_SAME_RANGE (prev_end) = 0;
4611 	  prev_end = NULL_TREE;
4612 	}
4613     }
4614 }
4615 
4616 /* Reverse the order of elements in the chain T of blocks,
4617    and return the new head of the chain (old last element).  */
4618 
4619 tree
4620 blocks_nreverse (tree t)
4621 {
4622   tree prev = 0, block, next;
4623   for (block = t; block; block = next)
4624     {
4625       next = BLOCK_CHAIN (block);
4626       BLOCK_CHAIN (block) = prev;
4627       prev = block;
4628     }
4629   return prev;
4630 }
4631 
4632 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4633    by modifying the last node in chain 1 to point to chain 2.  */
4634 
4635 tree
4636 block_chainon (tree op1, tree op2)
4637 {
4638   tree t1;
4639 
4640   if (!op1)
4641     return op2;
4642   if (!op2)
4643     return op1;
4644 
4645   for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4646     continue;
4647   BLOCK_CHAIN (t1) = op2;
4648 
4649 #ifdef ENABLE_TREE_CHECKING
4650   {
4651     tree t2;
4652     for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4653       gcc_assert (t2 != t1);
4654   }
4655 #endif
4656 
4657   return op1;
4658 }
4659 
4660 /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
4661    non-NULL, list them all into VECTOR, in a depth-first preorder
4662    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
4663    blocks.  */
4664 
4665 static int
4666 all_blocks (tree block, tree *vector)
4667 {
4668   int n_blocks = 0;
4669 
4670   while (block)
4671     {
4672       TREE_ASM_WRITTEN (block) = 0;
4673 
4674       /* Record this block.  */
4675       if (vector)
4676 	vector[n_blocks] = block;
4677 
4678       ++n_blocks;
4679 
4680       /* Record the subblocks, and their subblocks...  */
4681       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4682 			      vector ? vector + n_blocks : 0);
4683       block = BLOCK_CHAIN (block);
4684     }
4685 
4686   return n_blocks;
4687 }
4688 
4689 /* Return a vector containing all the blocks rooted at BLOCK.  The
4690    number of elements in the vector is stored in N_BLOCKS_P.  The
4691    vector is dynamically allocated; it is the caller's responsibility
4692    to call `free' on the pointer returned.  */
4693 
4694 static tree *
4695 get_block_vector (tree block, int *n_blocks_p)
4696 {
4697   tree *block_vector;
4698 
4699   *n_blocks_p = all_blocks (block, NULL);
4700   block_vector = XNEWVEC (tree, *n_blocks_p);
4701   all_blocks (block, block_vector);
4702 
4703   return block_vector;
4704 }
4705 
4706 static GTY(()) int next_block_index = 2;
4707 
4708 /* Set BLOCK_NUMBER for all the blocks in FN.  */
4709 
4710 void
4711 number_blocks (tree fn)
4712 {
4713   int i;
4714   int n_blocks;
4715   tree *block_vector;
4716 
4717   /* For SDB and XCOFF debugging output, we start numbering the blocks
4718      from 1 within each function, rather than keeping a running
4719      count.  */
4720 #if SDB_DEBUGGING_INFO || defined (XCOFF_DEBUGGING_INFO)
4721   if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4722     next_block_index = 1;
4723 #endif
4724 
4725   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4726 
4727   /* The top-level BLOCK isn't numbered at all.  */
4728   for (i = 1; i < n_blocks; ++i)
4729     /* We number the blocks from two.  */
4730     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4731 
4732   free (block_vector);
4733 
4734   return;
4735 }
4736 
4737 /* If VAR is present in a subblock of BLOCK, return the subblock.  */
4738 
4739 DEBUG_FUNCTION tree
4740 debug_find_var_in_block_tree (tree var, tree block)
4741 {
4742   tree t;
4743 
4744   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4745     if (t == var)
4746       return block;
4747 
4748   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4749     {
4750       tree ret = debug_find_var_in_block_tree (var, t);
4751       if (ret)
4752 	return ret;
4753     }
4754 
4755   return NULL_TREE;
4756 }
4757 
4758 /* Keep track of whether we're in a dummy function context.  If we are,
4759    we don't want to invoke the set_current_function hook, because we'll
4760    get into trouble if the hook calls target_reinit () recursively or
4761    when the initial initialization is not yet complete.  */
4762 
4763 static bool in_dummy_function;
4764 
4765 /* Invoke the target hook when setting cfun.  Update the optimization options
4766    if the function uses different options than the default.  */
4767 
4768 static void
4769 invoke_set_current_function_hook (tree fndecl)
4770 {
4771   if (!in_dummy_function)
4772     {
4773       tree opts = ((fndecl)
4774 		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4775 		   : optimization_default_node);
4776 
4777       if (!opts)
4778 	opts = optimization_default_node;
4779 
4780       /* Change optimization options if needed.  */
4781       if (optimization_current_node != opts)
4782 	{
4783 	  optimization_current_node = opts;
4784 	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4785 	}
4786 
4787       targetm.set_current_function (fndecl);
4788       this_fn_optabs = this_target_optabs;
4789 
4790       if (opts != optimization_default_node)
4791 	{
4792 	  init_tree_optimization_optabs (opts);
4793 	  if (TREE_OPTIMIZATION_OPTABS (opts))
4794 	    this_fn_optabs = (struct target_optabs *)
4795 	      TREE_OPTIMIZATION_OPTABS (opts);
4796 	}
4797     }
4798 }
4799 
4800 /* cfun should never be set directly; use this function.  */
4801 
4802 void
4803 set_cfun (struct function *new_cfun, bool force)
4804 {
4805   if (cfun != new_cfun || force)
4806     {
4807       cfun = new_cfun;
4808       invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4809       redirect_edge_var_map_empty ();
4810     }
4811 }
4812 
4813 /* Initialized with NOGC, making this poisonous to the garbage collector.  */
4814 
4815 static vec<function *> cfun_stack;
4816 
4817 /* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
4818    current_function_decl accordingly.  */
4819 
4820 void
4821 push_cfun (struct function *new_cfun)
4822 {
4823   gcc_assert ((!cfun && !current_function_decl)
4824 	      || (cfun && current_function_decl == cfun->decl));
4825   cfun_stack.safe_push (cfun);
4826   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4827   set_cfun (new_cfun);
4828 }
4829 
4830 /* Pop cfun from the stack.  Also set current_function_decl accordingly.  */
4831 
4832 void
4833 pop_cfun (void)
4834 {
4835   struct function *new_cfun = cfun_stack.pop ();
4836   /* When in_dummy_function, we do have a cfun but current_function_decl is
4837      NULL.  We also allow pushing NULL cfun and subsequently changing
4838      current_function_decl to something else and have both restored by
4839      pop_cfun.  */
4840   gcc_checking_assert (in_dummy_function
4841 		       || !cfun
4842 		       || current_function_decl == cfun->decl);
4843   set_cfun (new_cfun);
4844   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4845 }
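
/* A usage sketch for the pair above (illustrative only, not compiled):
   temporarily make FN's struct function the current one, do some work,
   then restore the previous context.  walk_body is a hypothetical helper,
   not part of this file.  */
#if 0
static void
example_with_function (tree fn)
{
  push_cfun (DECL_STRUCT_FUNCTION (fn));
  walk_body (cfun);
  pop_cfun ();
}
#endif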
4846 
4847 /* Return the current value of funcdef_no and increment it.  */
4848 int
4849 get_next_funcdef_no (void)
4850 {
4851   return funcdef_no++;
4852 }
4853 
4854 /* Return the current value of funcdef_no.  */
4855 int
4856 get_last_funcdef_no (void)
4857 {
4858   return funcdef_no;
4859 }
4860 
4861 /* Allocate a function structure for FNDECL and set its contents
4862    to the defaults.  Set cfun to the newly-allocated object.
4863    Some of the helper functions invoked during initialization assume
4864    that cfun has already been set.  Therefore, assign the new object
4865    directly into cfun and invoke the back end hook explicitly at the
4866    very end, rather than initializing a temporary and calling set_cfun
4867    on it.
4868 
4869    ABSTRACT_P is true if this is a function that will never be seen by
4870    the middle-end.  Such functions are front-end concepts (like C++
4871    function templates) that do not correspond directly to functions
4872    placed in object files.  */
4873 
4874 void
4875 allocate_struct_function (tree fndecl, bool abstract_p)
4876 {
4877   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4878 
4879   cfun = ggc_cleared_alloc<function> ();
4880 
4881   init_eh_for_function ();
4882 
4883   if (init_machine_status)
4884     cfun->machine = (*init_machine_status) ();
4885 
4886 #ifdef OVERRIDE_ABI_FORMAT
4887   OVERRIDE_ABI_FORMAT (fndecl);
4888 #endif
4889 
4890   if (fndecl != NULL_TREE)
4891     {
4892       DECL_STRUCT_FUNCTION (fndecl) = cfun;
4893       cfun->decl = fndecl;
4894       current_function_funcdef_no = get_next_funcdef_no ();
4895     }
4896 
4897   invoke_set_current_function_hook (fndecl);
4898 
4899   if (fndecl != NULL_TREE)
4900     {
4901       tree result = DECL_RESULT (fndecl);
4902 
4903       if (!abstract_p)
4904 	{
4905 	  /* Now that we have activated any function-specific attributes
4906 	     that might affect layout, particularly vector modes, relayout
4907 	     each of the parameters and the result.  */
4908 	  relayout_decl (result);
4909 	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4910 	       parm = DECL_CHAIN (parm))
4911 	    relayout_decl (parm);
4912 
4913 	  /* Similarly relayout the function decl.  */
4914 	  targetm.target_option.relayout_function (fndecl);
4915 	}
4916 
4917       if (!abstract_p && aggregate_value_p (result, fndecl))
4918 	{
4919 #ifdef PCC_STATIC_STRUCT_RETURN
4920 	  cfun->returns_pcc_struct = 1;
4921 #endif
4922 	  cfun->returns_struct = 1;
4923 	}
4924 
4925       cfun->stdarg = stdarg_p (fntype);
4926 
4927       /* Assume all registers in stdarg functions need to be saved.  */
4928       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4929       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4930 
4931       /* ??? This could be set on a per-function basis by the front-end
4932          but is this worth the hassle?  */
4933       cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4934       cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4935 
4936       if (!profile_flag && !flag_instrument_function_entry_exit)
4937 	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4938     }
4939 }
4940 
4941 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4942    instead of just setting it.  */
4943 
4944 void
4945 push_struct_function (tree fndecl)
4946 {
4947   /* When in_dummy_function we might be in the middle of a pop_cfun and
4948      current_function_decl and cfun may not match.  */
4949   gcc_assert (in_dummy_function
4950 	      || (!cfun && !current_function_decl)
4951 	      || (cfun && current_function_decl == cfun->decl));
4952   cfun_stack.safe_push (cfun);
4953   current_function_decl = fndecl;
4954   allocate_struct_function (fndecl, false);
4955 }
4956 
4957 /* Reset crtl and other non-struct-function variables to defaults as
4958    appropriate for emitting rtl at the start of a function.  */
4959 
4960 static void
4961 prepare_function_start (void)
4962 {
4963   gcc_assert (!get_last_insn ());
4964   init_temp_slots ();
4965   init_emit ();
4966   init_varasm_status ();
4967   init_expr ();
4968   default_rtl_profile ();
4969 
4970   if (flag_stack_usage_info)
4971     {
4972       cfun->su = ggc_cleared_alloc<stack_usage> ();
4973       cfun->su->static_stack_size = -1;
4974     }
4975 
4976   cse_not_expected = ! optimize;
4977 
4978   /* Caller save not needed yet.  */
4979   caller_save_needed = 0;
4980 
4981   /* We haven't done register allocation yet.  */
4982   reg_renumber = 0;
4983 
4984   /* Indicate that we have not instantiated virtual registers yet.  */
4985   virtuals_instantiated = 0;
4986 
4987   /* Indicate that we want CONCATs now.  */
4988   generating_concat_p = 1;
4989 
4990   /* Indicate we have no need of a frame pointer yet.  */
4991   frame_pointer_needed = 0;
4992 }
4993 
4994 void
4995 push_dummy_function (bool with_decl)
4996 {
4997   tree fn_decl, fn_type, fn_result_decl;
4998 
4999   gcc_assert (!in_dummy_function);
5000   in_dummy_function = true;
5001 
5002   if (with_decl)
5003     {
5004       fn_type = build_function_type_list (void_type_node, NULL_TREE);
5005       fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
5006 			    fn_type);
5007       fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
5008 					 NULL_TREE, void_type_node);
5009       DECL_RESULT (fn_decl) = fn_result_decl;
5010     }
5011   else
5012     fn_decl = NULL_TREE;
5013 
5014   push_struct_function (fn_decl);
5015 }
5016 
5017 /* Initialize the rtl expansion mechanism so that we can do simple things
5018    like generate sequences.  This is used to provide a context during global
5019    initialization of some passes.  You must call expand_dummy_function_end
5020    to exit this context.  */
5021 
5022 void
5023 init_dummy_function_start (void)
5024 {
5025   push_dummy_function (false);
5026   prepare_function_start ();
5027 }
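
/* A usage sketch (illustrative only, not compiled): bracket early RTL
   generation with the dummy-function context so that gen_reg_rtx and
   emit_insn have a cfun to work against, then tear it down again with
   expand_dummy_function_end.  */
#if 0
static void
example_probe_rtl (void)
{
  init_dummy_function_start ();

  rtx reg = gen_reg_rtx (word_mode);
  emit_insn (gen_rtx_SET (reg, const0_rtx));
  /* ... inspect the emitted insn here ...  */

  expand_dummy_function_end ();
}
#endif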
5028 
5029 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5030    and initialize static variables for generating RTL for the statements
5031    of the function.  */
5032 
5033 void
5034 init_function_start (tree subr)
5035 {
5036   /* Initialize backend, if needed.  */
5037   initialize_rtl ();
5038 
5039   prepare_function_start ();
5040   decide_function_section (subr);
5041 
5042   /* Warn if this value is an aggregate type,
5043      regardless of which calling convention we are using for it.  */
5044   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5045     warning (OPT_Waggregate_return, "function returns an aggregate");
5046 }
5047 
5048 /* Expand code to verify the stack_protect_guard.  This is invoked at
5049    the end of a function to be protected.  */
5050 
5051 void
5052 stack_protect_epilogue (void)
5053 {
5054   tree guard_decl = targetm.stack_protect_guard ();
5055   rtx_code_label *label = gen_label_rtx ();
5056   rtx x, y;
5057   rtx_insn *seq;
5058 
5059   x = expand_normal (crtl->stack_protect_guard);
5060   y = expand_normal (guard_decl);
5061 
5062   /* Allow the target to compare Y with X without leaking either into
5063      a register.  */
5064   if (targetm.have_stack_protect_test ()
5065       && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
5066     emit_insn (seq);
5067   else
5068     emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5069 
5070   /* The noreturn predictor has been moved to the tree level.  The rtl-level
5071      predictors estimate this branch about 20%, which isn't enough to get
5072      things moved out of line.  Since this is the only extant case of adding
5073      a noreturn function at the rtl level, it doesn't seem worth doing
5074      anything except adding the prediction by hand.  */
5075   rtx_insn *tmp = get_last_insn ();
5076   if (JUMP_P (tmp))
5077     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5078 
5079   expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5080   free_temp_slots ();
5081   emit_label (label);
5082 }
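
/* Conceptually (illustrative only, not compiled), with typical default
   target hooks the sequence emitted above behaves like the following
   source-level check; GUARD_SLOT stands for the in-frame copy referenced
   by crtl->stack_protect_guard and is a hypothetical name.  */
#if 0
extern void *__stack_chk_guard;
extern void __stack_chk_fail (void);

static void
example_epilogue_check (void *guard_slot)
{
  if (guard_slot != __stack_chk_guard)
    __stack_chk_fail ();
}
#endif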
5083 
5084 /* Start the RTL for a new function, and set variables used for
5085    emitting RTL.
5086    SUBR is the FUNCTION_DECL node.  */
5089 
5090 void
5091 expand_function_start (tree subr)
5092 {
5093   /* Make sure volatile mem refs aren't considered
5094      valid operands of arithmetic insns.  */
5095   init_recog_no_volatile ();
5096 
5097   crtl->profile
5098     = (profile_flag
5099        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5100 
5101   crtl->limit_stack
5102     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5103 
5104   /* Make the label for return statements to jump to.  Do not special
5105      case machines with special return instructions -- they will be
5106      handled later during jump, ifcvt, or epilogue creation.  */
5107   return_label = gen_label_rtx ();
5108 
5109   /* Initialize rtx used to return the value.  */
5110   /* Do this before assign_parms so that we copy the struct value address
5111      before any library calls that assign parms might generate.  */
5112 
5113   /* Decide whether to return the value in memory or in a register.  */
5114   tree res = DECL_RESULT (subr);
5115   if (aggregate_value_p (res, subr))
5116     {
5117       /* Returning something that won't go in a register.  */
5118       rtx value_address = 0;
5119 
5120 #ifdef PCC_STATIC_STRUCT_RETURN
5121       if (cfun->returns_pcc_struct)
5122 	{
5123 	  int size = int_size_in_bytes (TREE_TYPE (res));
5124 	  value_address = assemble_static_space (size);
5125 	}
5126       else
5127 #endif
5128 	{
5129 	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5130 	  /* Expect to be passed the address of a place to store the value.
5131 	     If it is passed as an argument, assign_parms will take care of
5132 	     it.  */
5133 	  if (sv)
5134 	    {
5135 	      value_address = gen_reg_rtx (Pmode);
5136 	      emit_move_insn (value_address, sv);
5137 	    }
5138 	}
5139       if (value_address)
5140 	{
5141 	  rtx x = value_address;
5142 	  if (!DECL_BY_REFERENCE (res))
5143 	    {
5144 	      x = gen_rtx_MEM (DECL_MODE (res), x);
5145 	      set_mem_attributes (x, res, 1);
5146 	    }
5147 	  set_parm_rtl (res, x);
5148 	}
5149     }
5150   else if (DECL_MODE (res) == VOIDmode)
5151     /* If return mode is void, this decl rtl should not be used.  */
5152     set_parm_rtl (res, NULL_RTX);
5153   else
5154     {
5155       /* Compute the return values into a pseudo reg, which we will copy
5156 	 into the true return register after the cleanups are done.  */
5157       tree return_type = TREE_TYPE (res);
5158 
5159       /* If we may coalesce this result, make sure it has the expected mode
5160 	 in case it was promoted.  But we need not bother about BLKmode.  */
5161       machine_mode promoted_mode
5162 	= flag_tree_coalesce_vars && is_gimple_reg (res)
5163 	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5164 	  : BLKmode;
5165 
5166       if (promoted_mode != BLKmode)
5167 	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5168       else if (TYPE_MODE (return_type) != BLKmode
5169 	       && targetm.calls.return_in_msb (return_type))
5170 	/* expand_function_end will insert the appropriate padding in
5171 	   this case.  Use the return value's natural (unpadded) mode
5172 	   within the function proper.  */
5173 	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5174       else
5175 	{
5176 	  /* In order to figure out what mode to use for the pseudo, we
5177 	     figure out what the mode of the eventual return register will
5178 	     actually be, and use that.  */
5179 	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5180 
5181 	  /* Structures that are returned in registers are not
5182 	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
5183 	  if (REG_P (hard_reg))
5184 	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5185 	  else
5186 	    {
5187 	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5188 	      set_parm_rtl (res, gen_group_rtx (hard_reg));
5189 	    }
5190 	}
5191 
5192       /* Set DECL_REGISTER flag so that expand_function_end will copy the
5193 	 result to the real return register(s).  */
5194       DECL_REGISTER (res) = 1;
5195 
5196       if (chkp_function_instrumented_p (current_function_decl))
5197 	{
5198 	  tree return_type = TREE_TYPE (res);
5199 	  rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5200 								 subr, 1);
5201 	  SET_DECL_BOUNDS_RTL (res, bounds);
5202 	}
5203     }
5204 
5205   /* Initialize rtx for parameters and local variables.
5206      In some cases this requires emitting insns.  */
5207   assign_parms (subr);
5208 
5209   /* If function gets a static chain arg, store it.  */
5210   if (cfun->static_chain_decl)
5211     {
5212       tree parm = cfun->static_chain_decl;
5213       rtx local, chain;
5214       rtx_insn *insn;
5215       int unsignedp;
5216 
5217       local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5218       chain = targetm.calls.static_chain (current_function_decl, true);
5219 
5220       set_decl_incoming_rtl (parm, chain, false);
5221       set_parm_rtl (parm, local);
5222       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5223 
5224       if (GET_MODE (local) != GET_MODE (chain))
5225 	{
5226 	  convert_move (local, chain, unsignedp);
5227 	  insn = get_last_insn ();
5228 	}
5229       else
5230 	insn = emit_move_insn (local, chain);
5231 
5232       /* Mark the register as eliminable, similar to parameters.  */
5233       if (MEM_P (chain)
5234 	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5235 	set_dst_reg_note (insn, REG_EQUIV, chain, local);
5236 
5237       /* If we aren't optimizing, save the static chain onto the stack.  */
5238       if (!optimize)
5239 	{
5240 	  tree saved_static_chain_decl
5241 	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5242 			  DECL_NAME (parm), TREE_TYPE (parm));
5243 	  rtx saved_static_chain_rtx
5244 	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5245 	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5246 	  emit_move_insn (saved_static_chain_rtx, chain);
5247 	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5248 	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
5249 	}
5250     }
5251 
5252   /* If the function receives a non-local goto, then store the
5253      bits we need to restore the frame pointer.  */
5254   if (cfun->nonlocal_goto_save_area)
5255     {
5256       tree t_save;
5257       rtx r_save;
5258 
5259       tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5260       gcc_assert (DECL_RTL_SET_P (var));
5261 
5262       t_save = build4 (ARRAY_REF,
5263 		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5264 		       cfun->nonlocal_goto_save_area,
5265 		       integer_zero_node, NULL_TREE, NULL_TREE);
5266       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5267       gcc_assert (GET_MODE (r_save) == Pmode);
5268 
5269       emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5270       update_nonlocal_goto_save_area ();
5271     }
5272 
5273   /* The following was moved from init_function_start.
5274      The move is supposed to make sdb output more accurate.  */
5275   /* Indicate the beginning of the function body,
5276      as opposed to parm setup.  */
5277   emit_note (NOTE_INSN_FUNCTION_BEG);
5278 
5279   gcc_assert (NOTE_P (get_last_insn ()));
5280 
5281   parm_birth_insn = get_last_insn ();
5282 
5283   if (crtl->profile)
5284     {
5285 #ifdef PROFILE_HOOK
5286       PROFILE_HOOK (current_function_funcdef_no);
5287 #endif
5288     }
5289 
5290   /* If we are doing generic stack checking, the probe should go here.  */
5291   if (flag_stack_check == GENERIC_STACK_CHECK)
5292     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5293 }
5294 
5295 void
5296 pop_dummy_function (void)
5297 {
5298   pop_cfun ();
5299   in_dummy_function = false;
5300 }
5301 
5302 /* Undo the effects of init_dummy_function_start.  */
5303 void
5304 expand_dummy_function_end (void)
5305 {
5306   gcc_assert (in_dummy_function);
5307 
5308   /* End any sequences that failed to be closed due to syntax errors.  */
5309   while (in_sequence_p ())
5310     end_sequence ();
5311 
5312   /* Outside function body, can't compute type's actual size
5313      until next function's body starts.  */
5314 
5315   free_after_parsing (cfun);
5316   free_after_compilation (cfun);
5317   pop_dummy_function ();
5318 }
5319 
5320 /* Helper for diddle_return_value.  */
5321 
5322 void
5323 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5324 {
5325   if (! outgoing)
5326     return;
5327 
5328   if (REG_P (outgoing))
5329     (*doit) (outgoing, arg);
5330   else if (GET_CODE (outgoing) == PARALLEL)
5331     {
5332       int i;
5333 
5334       for (i = 0; i < XVECLEN (outgoing, 0); i++)
5335 	{
5336 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5337 
5338 	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5339 	    (*doit) (x, arg);
5340 	}
5341     }
5342 }
5343 
5344 /* Call DOIT for each hard register used as a return value from
5345    the current function.  */
5346 
5347 void
5348 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5349 {
5350   diddle_return_value_1 (doit, arg, crtl->return_bnd);
5351   diddle_return_value_1 (doit, arg, crtl->return_rtx);
5352 }
5353 
5354 static void
5355 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5356 {
5357   emit_clobber (reg);
5358 }
5359 
5360 void
5361 clobber_return_register (void)
5362 {
5363   diddle_return_value (do_clobber_return_reg, NULL);
5364 
5365   /* In case we do use pseudo to return value, clobber it too.  */
5366   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5367     {
5368       tree decl_result = DECL_RESULT (current_function_decl);
5369       rtx decl_rtl = DECL_RTL (decl_result);
5370       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5371 	{
5372 	  do_clobber_return_reg (decl_rtl, NULL);
5373 	}
5374     }
5375 }
5376 
5377 static void
5378 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5379 {
5380   emit_use (reg);
5381 }
5382 
5383 static void
5384 use_return_register (void)
5385 {
5386   diddle_return_value (do_use_return_reg, NULL);
5387 }
5388 
5389 /* Set the location of the insn chain starting at INSN to LOC.  */
5390 
5391 static void
5392 set_insn_locations (rtx_insn *insn, int loc)
5393 {
5394   while (insn != NULL)
5395     {
5396       if (INSN_P (insn))
5397 	INSN_LOCATION (insn) = loc;
5398       insn = NEXT_INSN (insn);
5399     }
5400 }
5401 
5402 /* Generate RTL for the end of the current function.  */
5403 
5404 void
5405 expand_function_end (void)
5406 {
5407   /* If arg_pointer_save_area was referenced only from a nested
5408      function, we will not have initialized it yet.  Do that now.  */
5409   if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5410     get_arg_pointer_save_area ();
5411 
5412   /* If we are doing generic stack checking and this function makes calls,
5413      do a stack probe at the start of the function to ensure we have enough
5414      space for another stack frame.  */
5415   if (flag_stack_check == GENERIC_STACK_CHECK)
5416     {
5417       rtx_insn *insn, *seq;
5418 
5419       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5420 	if (CALL_P (insn))
5421 	  {
5422 	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5423 	    start_sequence ();
5424 	    if (STACK_CHECK_MOVING_SP)
5425 	      anti_adjust_stack_and_probe (max_frame_size, true);
5426 	    else
5427 	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5428 	    seq = get_insns ();
5429 	    end_sequence ();
5430 	    set_insn_locations (seq, prologue_location);
5431 	    emit_insn_before (seq, stack_check_probe_note);
5432 	    break;
5433 	  }
5434     }
5435 
5436   /* End any sequences that failed to be closed due to syntax errors.  */
5437   while (in_sequence_p ())
5438     end_sequence ();
5439 
5440   clear_pending_stack_adjust ();
5441   do_pending_stack_adjust ();
5442 
5443   /* Output a linenumber for the end of the function.
5444      SDB depends on this.  */
5445   set_curr_insn_location (input_location);
5446 
5447   /* Before the return label (if any), clobber the return
5448      registers so that they are not propagated live to the rest of
5449      the function.  This can only happen with functions that drop
5450      through; if there had been a return statement, there would
5451      have either been a return rtx, or a jump to the return label.
5452 
5453      We delay actual code generation after the current_function_value_rtx
5454      is computed.  */
5455   rtx_insn *clobber_after = get_last_insn ();
5456 
5457   /* Output the label for the actual return from the function.  */
5458   emit_label (return_label);
5459 
5460   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5461     {
5462       /* Let except.c know where it should emit the call to unregister
5463 	 the function context for sjlj exceptions.  */
5464       if (flag_exceptions)
5465 	sjlj_emit_function_exit_after (get_last_insn ());
5466     }
5467   else
5468     {
5469       /* We want to ensure that instructions that may trap are not
5470 	 moved into the epilogue by scheduling, because we don't
5471 	 always emit unwind information for the epilogue.  */
5472       if (cfun->can_throw_non_call_exceptions)
5473 	emit_insn (gen_blockage ());
5474     }
5475 
5476   /* If this is an implementation of throw, do what's necessary to
5477      communicate between __builtin_eh_return and the epilogue.  */
5478   expand_eh_return ();
5479 
5480   /* If scalar return value was computed in a pseudo-reg, or was a named
5481      return value that got dumped to the stack, copy that to the hard
5482      return register.  */
5483   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5484     {
5485       tree decl_result = DECL_RESULT (current_function_decl);
5486       rtx decl_rtl = DECL_RTL (decl_result);
5487 
5488       if (REG_P (decl_rtl)
5489 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5490 	  : DECL_REGISTER (decl_result))
5491 	{
5492 	  rtx real_decl_rtl = crtl->return_rtx;
5493 
5494 	  /* This should be set in assign_parms.  */
5495 	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5496 
5497 	  /* If this is a BLKmode structure being returned in registers,
5498 	     then use the mode computed in expand_return.  Note that if
5499 	     decl_rtl is memory, then its mode may have been changed,
5500 	     but that crtl->return_rtx has not.  */
5501 	  if (GET_MODE (real_decl_rtl) == BLKmode)
5502 	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5503 
5504 	  /* If a non-BLKmode return value should be padded at the least
5505 	     significant end of the register, shift it left by the appropriate
5506 	     amount.  BLKmode results are handled using the group load/store
5507 	     machinery.  */
5508 	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5509 	      && REG_P (real_decl_rtl)
5510 	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5511 	    {
5512 	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5513 					   REGNO (real_decl_rtl)),
5514 			      decl_rtl);
5515 	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5516 	    }
5517 	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
5518 	    {
5519 	      /* If expand_function_start has created a PARALLEL for decl_rtl,
5520 		 move the result to the real return registers.  Otherwise, do
5521 		 a group load from decl_rtl for a named return.  */
5522 	      if (GET_CODE (decl_rtl) == PARALLEL)
5523 		emit_group_move (real_decl_rtl, decl_rtl);
5524 	      else
5525 		emit_group_load (real_decl_rtl, decl_rtl,
5526 				 TREE_TYPE (decl_result),
5527 				 int_size_in_bytes (TREE_TYPE (decl_result)));
5528 	    }
5529 	  /* In the case of complex integer modes smaller than a word, we'll
5530 	     need to generate some non-trivial bitfield insertions.  Do that
5531 	     on a pseudo and not the hard register.  */
5532 	  else if (GET_CODE (decl_rtl) == CONCAT
5533 		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5534 		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5535 	    {
5536 	      int old_generating_concat_p;
5537 	      rtx tmp;
5538 
5539 	      old_generating_concat_p = generating_concat_p;
5540 	      generating_concat_p = 0;
5541 	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5542 	      generating_concat_p = old_generating_concat_p;
5543 
5544 	      emit_move_insn (tmp, decl_rtl);
5545 	      emit_move_insn (real_decl_rtl, tmp);
5546 	    }
5547 	  /* If a named return value dumped decl_rtl to memory, then
5548 	     we may need to re-do the PROMOTE_MODE signed/unsigned
5549 	     extension.  */
5550 	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5551 	    {
5552 	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5553 	      promote_function_mode (TREE_TYPE (decl_result),
5554 				     GET_MODE (decl_rtl), &unsignedp,
5555 				     TREE_TYPE (current_function_decl), 1);
5556 
5557 	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
5558 	    }
5559 	  else
5560 	    emit_move_insn (real_decl_rtl, decl_rtl);
5561 	}
5562     }
5563 
5564   /* If returning a structure, arrange to return the address of the value
5565      in a place where debuggers expect to find it.
5566 
5567      If returning a structure PCC style,
5568      the caller also depends on this value.
5569      And cfun->returns_pcc_struct is not necessarily set.  */
5570   if ((cfun->returns_struct || cfun->returns_pcc_struct)
5571       && !targetm.calls.omit_struct_return_reg)
5572     {
5573       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5574       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5575       rtx outgoing;
5576 
5577       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5578 	type = TREE_TYPE (type);
5579       else
5580 	value_address = XEXP (value_address, 0);
5581 
5582       outgoing = targetm.calls.function_value (build_pointer_type (type),
5583 					       current_function_decl, true);
5584 
5585       /* Mark this as a function return value so integrate will delete the
5586 	 assignment and USE below when inlining this function.  */
5587       REG_FUNCTION_VALUE_P (outgoing) = 1;
5588 
5589       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
5590       value_address = convert_memory_address (GET_MODE (outgoing),
5591 					      value_address);
5592 
5593       emit_move_insn (outgoing, value_address);
5594 
5595       /* Show return register used to hold result (in this case the address
5596 	 of the result).  */
5597       crtl->return_rtx = outgoing;
5598     }
5599 
5600   /* Emit the actual code to clobber return register.  Don't emit
5601      it if clobber_after is a barrier, then the previous basic block
5602      certainly doesn't fall thru into the exit block.  */
5603   if (!BARRIER_P (clobber_after))
5604     {
5605       start_sequence ();
5606       clobber_return_register ();
5607       rtx_insn *seq = get_insns ();
5608       end_sequence ();
5609 
5610       emit_insn_after (seq, clobber_after);
5611     }
5612 
5613   /* Output the label for the naked return from the function.  */
5614   if (naked_return_label)
5615     emit_label (naked_return_label);
5616 
5617   /* @@@ This is a kludge.  We want to ensure that instructions that
5618      may trap are not moved into the epilogue by scheduling, because
5619      we don't always emit unwind information for the epilogue.  */
5620   if (cfun->can_throw_non_call_exceptions
5621       && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5622     emit_insn (gen_blockage ());
5623 
5624   /* If stack protection is enabled for this function, check the guard.  */
5625   if (crtl->stack_protect_guard)
5626     stack_protect_epilogue ();
5627 
5628   /* If we had calls to alloca, and this machine needs
5629      an accurate stack pointer to exit the function,
5630      insert some code to save and restore the stack pointer.  */
5631   if (! EXIT_IGNORE_STACK
5632       && cfun->calls_alloca)
5633     {
5634       rtx tem = 0;
5635 
5636       start_sequence ();
5637       emit_stack_save (SAVE_FUNCTION, &tem);
5638       rtx_insn *seq = get_insns ();
5639       end_sequence ();
5640       emit_insn_before (seq, parm_birth_insn);
5641 
5642       emit_stack_restore (SAVE_FUNCTION, tem);
5643     }
5644 
5645   /* ??? This should no longer be necessary since stupid is no longer with
5646      us, but there are some parts of the compiler (eg reload_combine, and
5647      sh mach_dep_reorg) that still try and compute their own lifetime info
5648      instead of using the general framework.  */
5649   use_return_register ();
5650 }
5651 
5652 rtx
5653 get_arg_pointer_save_area (void)
5654 {
5655   rtx ret = arg_pointer_save_area;
5656 
5657   if (! ret)
5658     {
5659       ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5660       arg_pointer_save_area = ret;
5661     }
5662 
5663   if (! crtl->arg_pointer_save_area_init)
5664     {
5665       /* Save the arg pointer at the beginning of the function.  The
5666 	 generated stack slot may not be a valid memory address, so we
5667 	 have to check it and fix it if necessary.  */
5668       start_sequence ();
5669       emit_move_insn (validize_mem (copy_rtx (ret)),
5670                       crtl->args.internal_arg_pointer);
5671       rtx_insn *seq = get_insns ();
5672       end_sequence ();
5673 
5674       push_topmost_sequence ();
5675       emit_insn_after (seq, entry_of_function ());
5676       pop_topmost_sequence ();
5677 
5678       crtl->arg_pointer_save_area_init = true;
5679     }
5680 
5681   return ret;
5682 }
5683 
5684 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5685    for the first time.  */
5686 
5687 static void
5688 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5689 {
5690   rtx_insn *tmp;
5691   hash_table<insn_cache_hasher> *hash = *hashp;
5692 
5693   if (hash == NULL)
5694     *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5695 
5696   for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5697     {
5698       rtx *slot = hash->find_slot (tmp, INSERT);
5699       gcc_assert (*slot == NULL);
5700       *slot = tmp;
5701     }
5702 }
5703 
5704 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5705    basic block, splitting or peepholes.  If INSN is a prologue or epilogue
5706    insn, then record COPY as well.  */
5707 
5708 void
5709 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5710 {
5711   hash_table<insn_cache_hasher> *hash;
5712   rtx *slot;
5713 
5714   hash = epilogue_insn_hash;
5715   if (!hash || !hash->find (insn))
5716     {
5717       hash = prologue_insn_hash;
5718       if (!hash || !hash->find (insn))
5719 	return;
5720     }
5721 
5722   slot = hash->find_slot (copy, INSERT);
5723   gcc_assert (*slot == NULL);
5724   *slot = copy;
5725 }
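
/* Illustrative only (not compiled): a pass that duplicates insns is
   expected to keep the prologue/epilogue hashes in sync along these
   lines.  INSN and AFTER are hypothetical locals; emit_copy_of_insn_after
   is the usual way to make such a copy.  */
#if 0
  rtx_insn *copy = emit_copy_of_insn_after (insn, after);
  maybe_copy_prologue_epilogue_insn (insn, copy);
#endif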
5726 
5727 /* Determine if any INSNs in HASH are, or are part of, INSN.  Because
5728    we can be running after reorg, SEQUENCE rtl is possible.  */
5729 
5730 static bool
5731 contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
5732 {
5733   if (hash == NULL)
5734     return false;
5735 
5736   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5737     {
5738       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5739       int i;
5740       for (i = seq->len () - 1; i >= 0; i--)
5741 	if (hash->find (seq->element (i)))
5742 	  return true;
5743       return false;
5744     }
5745 
5746   return hash->find (const_cast<rtx> (insn)) != NULL;
5747 }
5748 
5749 int
5750 prologue_epilogue_contains (const_rtx insn)
5751 {
5752   if (contains (insn, prologue_insn_hash))
5753     return 1;
5754   if (contains (insn, epilogue_insn_hash))
5755     return 1;
5756   return 0;
5757 }
5758 
5759 /* Insert use of return register before the end of BB.  */
5760 
5761 static void
5762 emit_use_return_register_into_block (basic_block bb)
5763 {
5764   start_sequence ();
5765   use_return_register ();
5766   rtx_insn *seq = get_insns ();
5767   end_sequence ();
5768   rtx_insn *insn = BB_END (bb);
5769   if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5770     insn = prev_cc0_setter (insn);
5771 
5772   emit_insn_before (seq, insn);
5773 }
5774 
5775 
5776 /* Create a return pattern, either simple_return or return, depending on
5777    simple_p.  */
5778 
5779 static rtx_insn *
5780 gen_return_pattern (bool simple_p)
5781 {
5782   return (simple_p
5783 	  ? targetm.gen_simple_return ()
5784 	  : targetm.gen_return ());
5785 }
5786 
5787 /* Insert an appropriate return pattern at the end of block BB.  This
5788    also means updating block_for_insn appropriately.  SIMPLE_P is
5789    the same as in gen_return_pattern and passed to it.  */
5790 
5791 void
5792 emit_return_into_block (bool simple_p, basic_block bb)
5793 {
5794   rtx_jump_insn *jump = emit_jump_insn_after (gen_return_pattern (simple_p),
5795 					      BB_END (bb));
5796   rtx pat = PATTERN (jump);
5797   if (GET_CODE (pat) == PARALLEL)
5798     pat = XVECEXP (pat, 0, 0);
5799   gcc_assert (ANY_RETURN_P (pat));
5800   JUMP_LABEL (jump) = pat;
5801 }
5802 
5803 /* Set JUMP_LABEL for a return insn.  */
5804 
5805 void
5806 set_return_jump_label (rtx_insn *returnjump)
5807 {
5808   rtx pat = PATTERN (returnjump);
5809   if (GET_CODE (pat) == PARALLEL)
5810     pat = XVECEXP (pat, 0, 0);
5811   if (ANY_RETURN_P (pat))
5812     JUMP_LABEL (returnjump) = pat;
5813   else
5814     JUMP_LABEL (returnjump) = ret_rtx;
5815 }
5816 
5817 /* Return true if there are any active insns between HEAD and TAIL.  */
5818 bool
5819 active_insn_between (rtx_insn *head, rtx_insn *tail)
5820 {
5821   while (tail)
5822     {
5823       if (active_insn_p (tail))
5824 	return true;
5825       if (tail == head)
5826 	return false;
5827       tail = PREV_INSN (tail);
5828     }
5829   return false;
5830 }
5831 
5832 /* LAST_BB is a block that exits and is empty of active instructions.
5833    Examine its predecessors for jumps that can be converted to
5834    (conditional) returns.  */
5835 vec<edge>
5836 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5837 			  vec<edge> unconverted ATTRIBUTE_UNUSED)
5838 {
5839   int i;
5840   basic_block bb;
5841   edge_iterator ei;
5842   edge e;
5843   auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
5844 
5845   FOR_EACH_EDGE (e, ei, last_bb->preds)
5846     if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5847       src_bbs.quick_push (e->src);
5848 
5849   rtx_insn *label = BB_HEAD (last_bb);
5850 
5851   FOR_EACH_VEC_ELT (src_bbs, i, bb)
5852     {
5853       rtx_insn *jump = BB_END (bb);
5854 
5855       if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5856 	continue;
5857 
5858       e = find_edge (bb, last_bb);
5859 
5860       /* If we have an unconditional jump, we can replace that
5861 	 with a simple return instruction.  */
5862       if (simplejump_p (jump))
5863 	{
5864 	  /* The use of the return register might be present in the exit
5865 	     fallthru block.  Either:
5866 	     - removing the use is safe, and we should remove the use in
5867 	     the exit fallthru block, or
5868 	     - removing the use is not safe, and we should add it here.
5869 	     For now, we conservatively choose the latter.  Either of the
5870 	     2 helps in crossjumping.  */
5871 	  emit_use_return_register_into_block (bb);
5872 
5873 	  emit_return_into_block (simple_p, bb);
5874 	  delete_insn (jump);
5875 	}
5876 
5877       /* If we have a conditional jump branching to the last
5878 	 block, we can try to replace that with a conditional
5879 	 return instruction.  */
5880       else if (condjump_p (jump))
5881 	{
5882 	  rtx dest;
5883 
5884 	  if (simple_p)
5885 	    dest = simple_return_rtx;
5886 	  else
5887 	    dest = ret_rtx;
5888 	  if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
5889 	    {
5890 	      if (targetm.have_simple_return () && simple_p)
5891 		{
5892 		  if (dump_file)
5893 		    fprintf (dump_file,
5894 			     "Failed to redirect bb %d branch.\n", bb->index);
5895 		  unconverted.safe_push (e);
5896 		}
5897 	      continue;
5898 	    }
5899 
5900 	  /* See comment in simplejump_p case above.  */
5901 	  emit_use_return_register_into_block (bb);
5902 
5903 	  /* If this block has only one successor, it both jumps
5904 	     and falls through to the fallthru block, so we can't
5905 	     delete the edge.  */
5906 	  if (single_succ_p (bb))
5907 	    continue;
5908 	}
5909       else
5910 	{
5911 	  if (targetm.have_simple_return () && simple_p)
5912 	    {
5913 	      if (dump_file)
5914 		fprintf (dump_file,
5915 			 "Failed to redirect bb %d branch.\n", bb->index);
5916 	      unconverted.safe_push (e);
5917 	    }
5918 	  continue;
5919 	}
5920 
5921       /* Fix up the CFG for the successful change we just made.  */
5922       redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
5923       e->flags &= ~EDGE_CROSSING;
5924     }
5925   src_bbs.release ();
5926   return unconverted;
5927 }
5928 
5929 /* Emit a return insn for the exit fallthru block.  */
5930 basic_block
5931 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5932 {
5933   basic_block last_bb = exit_fallthru_edge->src;
5934 
5935   if (JUMP_P (BB_END (last_bb)))
5936     {
5937       last_bb = split_edge (exit_fallthru_edge);
5938       exit_fallthru_edge = single_succ_edge (last_bb);
5939     }
5940   emit_barrier_after (BB_END (last_bb));
5941   emit_return_into_block (simple_p, last_bb);
5942   exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5943   return last_bb;
5944 }
5945 
5946 
5947 /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
5948    this into place with notes indicating where the prologue ends and where
5949    the epilogue begins.  Update the basic block information when possible.
5950 
5951    Notes on epilogue placement:
5952    There are several kinds of edges to the exit block:
5953    * a single fallthru edge from LAST_BB
5954    * possibly, edges from blocks containing sibcalls
5955    * possibly, fake edges from infinite loops
5956 
5957    The epilogue is always emitted on the fallthru edge from the last basic
5958    block in the function, LAST_BB, into the exit block.
5959 
5960    If LAST_BB is empty except for a label, it is the target of every
5961    other basic block in the function that ends in a return.  If a
5962    target has a return or simple_return pattern (possibly with
5963    conditional variants), these basic blocks can be changed so that a
5964    return insn is emitted into them, and their target is adjusted to
5965    the real exit block.
5966 
5967    Notes on shrink wrapping: We implement a fairly conservative
5968    version of shrink-wrapping rather than the textbook one.  We only
5969    generate a single prologue and a single epilogue.  This is
5970    sufficient to catch a number of interesting cases involving early
5971    exits.
5972 
5973    First, we identify the blocks that require the prologue to occur before
5974    them.  These are the ones that modify a call-saved register, or reference
5975    any of the stack or frame pointer registers.  To simplify things, we then
5976    mark everything reachable from these blocks as also requiring a prologue.
5977    This takes care of loops automatically, and avoids the need to examine
5978    whether MEMs reference the frame, since it is sufficient to check for
5979    occurrences of the stack or frame pointer.
5980 
5981    We then compute the set of blocks for which the need for a prologue
5982    is anticipatable (borrowing terminology from the shrink-wrapping
5983    description in Muchnick's book).  These are the blocks which either
5984    require a prologue themselves, or those that have only successors
5985    where the prologue is anticipatable.  The prologue needs to be
5986    inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5987    is not.  For the moment, we ensure that only one such edge exists.
5988 
5989    The epilogue is placed as described above, but we make a
5990    distinction between inserting return and simple_return patterns
5991    when modifying other blocks that end in a return.  Blocks that end
5992    in a sibcall omit the sibcall_epilogue if the block is not in
5993    ANTIC.  */
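
/* For illustration only (not compiled): the kind of early-exit function
   the conservative shrink-wrapping described above is aimed at.  The
   early-return path neither clobbers call-saved registers nor references
   the frame, so the single prologue only needs to be emitted in front of
   the block containing the call; "expensive" is a hypothetical callee.  */
#if 0
int
example_shrink_wrap_candidate (int *p)
{
  if (p == 0)
    return -1;			/* Reachable without running the prologue.  */
  return expensive (p);		/* Prologue required before this block.  */
}
#endif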
5994 
5995 void
5996 thread_prologue_and_epilogue_insns (void)
5997 {
5998   bool inserted;
5999   vec<edge> unconverted_simple_returns = vNULL;
6000   bitmap_head bb_flags;
6001   rtx_insn *returnjump;
6002   rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
6003   rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
6004   edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
6005   edge_iterator ei;
6006 
6007   df_analyze ();
6008 
6009   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6010 
6011   inserted = false;
6012   epilogue_end = NULL;
6013   returnjump = NULL;
6014 
6015   /* Can't deal with multiple successors of the entry block at the
6016      moment.  Function should always have at least one entry
6017      point.  */
6018   gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6019   entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6020   orig_entry_edge = entry_edge;
6021 
6022   split_prologue_seq = NULL;
6023   if (flag_split_stack
6024       && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
6025 	  == NULL))
6026     {
6027       start_sequence ();
6028       emit_insn (targetm.gen_split_stack_prologue ());
6029       split_prologue_seq = get_insns ();
6030       end_sequence ();
6031 
6032       record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
6033       set_insn_locations (split_prologue_seq, prologue_location);
6034     }
6035 
6036   prologue_seq = NULL;
6037   if (targetm.have_prologue ())
6038     {
6039       start_sequence ();
6040       rtx_insn *seq = targetm.gen_prologue ();
6041       emit_insn (seq);
6042 
6043       /* Insert an explicit USE for the frame pointer
6044          if profiling is on and the frame pointer is required.  */
6045       if (crtl->profile && frame_pointer_needed)
6046 	emit_use (hard_frame_pointer_rtx);
6047 
6048       /* Retain a map of the prologue insns.  */
6049       record_insns (seq, NULL, &prologue_insn_hash);
6050       emit_note (NOTE_INSN_PROLOGUE_END);
6051 
6052       /* Ensure that instructions are not moved into the prologue when
6053 	 profiling is on.  The call to the profiling routine can be
6054 	 emitted within the live range of a call-clobbered register.  */
6055       if (!targetm.profile_before_prologue () && crtl->profile)
6056         emit_insn (gen_blockage ());
6057 
6058       prologue_seq = get_insns ();
6059       end_sequence ();
6060       set_insn_locations (prologue_seq, prologue_location);
6061     }
6062 
6063   bitmap_initialize (&bb_flags, &bitmap_default_obstack);
6064 
6065   /* Try to perform a kind of shrink-wrapping, making sure the
6066      prologue/epilogue is emitted only around those parts of the
6067      function that require it.  */
6068 
6069   try_shrink_wrapping (&entry_edge, &bb_flags, prologue_seq);
6070 
6071   rtx_insn *split_prologue_insn = split_prologue_seq;
6072   if (split_prologue_seq != NULL_RTX)
6073     {
6074       while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6075 	split_prologue_insn = NEXT_INSN (split_prologue_insn);
6076       insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6077       inserted = true;
6078     }
6079   rtx_insn *prologue_insn = prologue_seq;
6080   if (prologue_seq != NULL_RTX)
6081     {
6082       while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6083 	prologue_insn = NEXT_INSN (prologue_insn);
6084       insert_insn_on_edge (prologue_seq, entry_edge);
6085       inserted = true;
6086     }
6087 
6088   /* If the exit block has no non-fake predecessors, we don't need
6089      an epilogue.  */
6090   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6091     if ((e->flags & EDGE_FAKE) == 0)
6092       break;
6093   if (e == NULL)
6094     goto epilogue_done;
6095 
6096   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6097 
6098   exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6099 
6100   if (targetm.have_simple_return () && entry_edge != orig_entry_edge)
6101     exit_fallthru_edge
6102 	= get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
6103 					 &unconverted_simple_returns,
6104 					 &returnjump);
6105   if (targetm.have_return ())
6106     {
6107       if (exit_fallthru_edge == NULL)
6108 	goto epilogue_done;
6109 
6110       if (optimize)
6111 	{
6112 	  basic_block last_bb = exit_fallthru_edge->src;
6113 
6114 	  if (LABEL_P (BB_HEAD (last_bb))
6115 	      && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6116 	    convert_jumps_to_returns (last_bb, false, vNULL);
6117 
6118 	  if (EDGE_COUNT (last_bb->preds) != 0
6119 	      && single_succ_p (last_bb))
6120 	    {
6121 	      last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6122 	      epilogue_end = returnjump = BB_END (last_bb);
6123 
6124 	      /* Emitting the return may add a basic block.
6125 		 Fix bb_flags for the added block.  */
6126 	      if (targetm.have_simple_return ()
6127 		  && last_bb != exit_fallthru_edge->src)
6128 		bitmap_set_bit (&bb_flags, last_bb->index);
6129 
6130 	      goto epilogue_done;
6131 	    }
6132 	}
6133     }
6134 
6135   /* A small fib -- epilogue is not yet completed, but we wish to re-use
6136      this marker for the splits of EH_RETURN patterns, and nothing else
6137      uses the flag in the meantime.  */
6138   epilogue_completed = 1;
6139 
6140   /* Find non-fallthru edges that end with EH_RETURN instructions.  On
6141      some targets, these get split to a special version of the epilogue
6142      code.  In order to be able to properly annotate these with unwind
6143      info, try to split them now.  If we get a valid split, drop an
6144      EPILOGUE_BEG note and mark the insns as epilogue insns.  */
6145   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6146     {
6147       rtx_insn *prev, *last, *trial;
6148 
6149       if (e->flags & EDGE_FALLTHRU)
6150 	continue;
6151       last = BB_END (e->src);
6152       if (!eh_returnjump_p (last))
6153 	continue;
6154 
6155       prev = PREV_INSN (last);
6156       trial = try_split (PATTERN (last), last, 1);
6157       if (trial == last)
6158 	continue;
6159 
6160       record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6161       emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6162     }
6163 
6164   /* If nothing falls through into the exit block, we don't need an
6165      epilogue.  */
6166 
6167   if (exit_fallthru_edge == NULL)
6168     goto epilogue_done;
6169 
6170   if (targetm.have_epilogue ())
6171     {
6172       start_sequence ();
6173       epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6174       rtx_insn *seq = targetm.gen_epilogue ();
6175       if (seq)
6176 	emit_jump_insn (seq);
6177 
6178       /* Retain a map of the epilogue insns.  */
6179       record_insns (seq, NULL, &epilogue_insn_hash);
6180       set_insn_locations (seq, epilogue_location);
6181 
6182       seq = get_insns ();
6183       returnjump = get_last_insn ();
6184       end_sequence ();
6185 
6186       insert_insn_on_edge (seq, exit_fallthru_edge);
6187       inserted = true;
6188 
6189       if (JUMP_P (returnjump))
6190 	set_return_jump_label (returnjump);
6191     }
6192   else
6193     {
6194       basic_block cur_bb;
6195 
6196       if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6197 	goto epilogue_done;
6198       /* We have a fall-through edge to the exit block, the source is not
6199          at the end of the function, and there will be an assembler epilogue
6200          at the end of the function.
6201          We can't use force_nonfallthru here, because that would try to
6202 	 use return.  Inserting a jump 'by hand' is extremely messy, so
6203 	 we take advantage of cfg_layout_finalize using
6204 	 fixup_fallthru_exit_predecessor.  */
6205       cfg_layout_initialize (0);
6206       FOR_EACH_BB_FN (cur_bb, cfun)
6207 	if (cur_bb->index >= NUM_FIXED_BLOCKS
6208 	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6209 	  cur_bb->aux = cur_bb->next_bb;
6210       cfg_layout_finalize ();
6211     }
6212 
6213 epilogue_done:

  default_rtl_profile ();

  if (inserted)
    {
      sbitmap blocks;

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      if (split_prologue_insn
	  && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
	split_prologue_insn = NULL;
      if (prologue_insn
	  && BLOCK_FOR_INSN (prologue_insn) == NULL)
	prologue_insn = NULL;
      blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (blocks);
      if (split_prologue_insn)
	bitmap_set_bit (blocks,
			BLOCK_FOR_INSN (split_prologue_insn)->index);
      if (prologue_insn)
	bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);

      /* The epilogue insns we inserted may cause the exit edge to no longer
	 be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  if (((e->flags & EDGE_FALLTHRU) != 0)
	      && returnjump_p (BB_END (e->src)))
	    e->flags &= ~EDGE_FALLTHRU;
	}
    }

  if (targetm.have_simple_return ())
    convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
			      returnjump, unconverted_simple_returns);

  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx_insn *insn = BB_END (bb);

      if (!CALL_P (insn)
	  || ! SIBLING_CALL_P (insn)
	  || (targetm.have_simple_return ()
	      && entry_edge != orig_entry_edge
	      && !bitmap_bit_p (&bb_flags, bb->index)))
	{
	  ei_next (&ei);
	  continue;
	}

      if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
	{
	  start_sequence ();
	  emit_note (NOTE_INSN_EPILOGUE_BEG);
	  emit_insn (ep_seq);
	  rtx_insn *seq = get_insns ();
	  end_sequence ();

	  /* Retain a map of the epilogue insns.  Used in life analysis to
	     avoid getting rid of sibcall epilogue insns.  Do this before we
	     actually emit the sequence.  */
	  record_insns (seq, NULL, &epilogue_insn_hash);
	  set_insn_locations (seq, epilogue_location);

	  emit_insn_before (seq, insn);
	}
      ei_next (&ei);
    }

  if (epilogue_end)
    {
      rtx_insn *insn, *next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so strict about the existence
	 of such a note.  Also possibly move NOTE_INSN_FUNCTION_BEG notes,
	 as those can be relevant for debug info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }

  bitmap_clear (&bb_flags);

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}

/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
  if (!targetm.have_prologue ()
      && !targetm.have_epilogue ()
      && !targetm.have_sibcall_epilogue ())
    return;

  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = prologue_insn_hash->elements ();
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  rtx_insn *insn, *first = NULL, *note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue insn.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame-related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
}

/* Returns the name of the function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of the function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}
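
/* Illustrative sketch (editorial, not part of GCC itself): a pass that
   wants to tag its dump output with the function being compiled would
   typically write something like

     if (dump_file)
       fprintf (dump_file, ";; processing %s\n", current_function_name ());

   fndecl_name and function_name are the building blocks for contexts where
   only a FUNCTION_DECL or a struct function pointer is at hand.  */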


static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}

/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
	func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}

/* Given a type, insert it into the used types hash table of cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	{
	  /* This might be a type referenced by a global variable.
	     Record that type so that we can later decide to emit its
	     debug information.  */
	  vec_safe_push (types_used_by_cur_var_decl, t);
	}
    }
}
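
/* Illustrative sketch (editorial, not part of GCC itself): callers pass the
   type of a declaration they have just committed to, e.g. roughly

     used_types_insert (TREE_TYPE (decl));

   Inside a function body the type lands in cfun->used_types_hash; for a
   file-scope variable it is queued on types_used_by_cur_var_decl until the
   variable itself is finalized.  */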

/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
			 types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}

/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash
	  = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc<types_used_by_vars_entry> ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
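
/* Illustrative sketch (editorial, not part of GCC itself): the identity of
   an entry is the pointer pair (var_decl, type), as the hash and equality
   routines above show, so recording that a global uses its own type is
   roughly

     types_used_by_var_decl_insert (TREE_TYPE (var), var);

   Re-inserting the same pair is harmless: find_slot with INSERT returns the
   existing slot and the allocation is skipped.  */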

namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
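
/* Illustrative note (editorial): make_pass_leaf_regs, like the other
   make_pass_* factories in this file, is only the hook the pass manager
   calls to instantiate the pass; where it runs is decided by its entry in
   passes.def, roughly of the form

     NEXT_PASS (pass_leaf_regs);

   so nothing here fixes its position in the pipeline.  */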

static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Some non-cold blocks may now be only reachable from cold blocks.
     Fix that up.  */
  fixup_partitions ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     see PR57320.  */
  cleanup_cfg (0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}

namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}


/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
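
/* Illustrative sketch (editorial, not a testcase shipped with GCC): at the
   source level the situation above typically comes from an in-out operand
   whose pre-asm value is still live afterwards, e.g. roughly

     int foo (int x)
     {
       int orig = x;
       asm ("" : "+mr" (x));
       return orig + x;
     }

   After SSA the "+mr" operand is split into an "=mr" output and a matching
   "0" input that end up as distinct SSA names, which is exactly what
   match_asm_constraints_1 below undoes by emitting the extra copy.  */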

static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change 'in' once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we replaced only the occurrence of the input operand (to
	 make the matching), we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}

/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (TREE_CODE (d) == VAR_DECL);
  vec_safe_push (fun->local_decls, d);
}

namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints

unsigned
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;
  rtx pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"