1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
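/* An illustrative sketch only (front ends differ in how they drive
   these entry points; this is not a definitive driver):

     expand_function_start (fndecl);
     ... expand each statement of the body to RTL ...
     expand_function_end ();

   with local variables obtaining their slots along the way via
   assign_stack_local, e.g. assign_stack_local (DImode, 8, 0) for an
   8-byte slot aligned according to DImode.  */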
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "memmodel.h"
45 #include "tm_p.h"
46 #include "stringpool.h"
47 #include "expmed.h"
48 #include "optabs.h"
49 #include "regs.h"
50 #include "emit-rtl.h"
51 #include "recog.h"
52 #include "rtl-error.h"
53 #include "alias.h"
54 #include "fold-const.h"
55 #include "stor-layout.h"
56 #include "varasm.h"
57 #include "except.h"
58 #include "dojump.h"
59 #include "explow.h"
60 #include "calls.h"
61 #include "expr.h"
62 #include "optabs-tree.h"
63 #include "output.h"
64 #include "langhooks.h"
65 #include "common/common-target.h"
66 #include "gimplify.h"
67 #include "tree-pass.h"
68 #include "cfgrtl.h"
69 #include "cfganal.h"
70 #include "cfgbuild.h"
71 #include "cfgcleanup.h"
72 #include "cfgexpand.h"
73 #include "shrink-wrap.h"
74 #include "toplev.h"
75 #include "rtl-iter.h"
76 #include "tree-chkp.h"
77 #include "rtl-chkp.h"
78 #include "tree-dfa.h"
79 #include "tree-ssa.h"
80 #include "stringpool.h"
81 #include "attribs.h"
82 #include "gimple.h"
83 #include "options.h"
84
85 /* So we can assign to cfun in this file. */
86 #undef cfun
87
88 #ifndef STACK_ALIGNMENT_NEEDED
89 #define STACK_ALIGNMENT_NEEDED 1
90 #endif
91
92 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
93
94 /* Round a value down to the largest multiple of the required alignment
95 that is not greater than it. Avoid using division in case the value is
96 negative. Assume the alignment is a power of two. */
97 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
98
99 /* Similar, but round up to the next multiple of the alignment. */
100
101 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
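/* Worked examples, assuming two's complement arithmetic:
   FLOOR_ROUND (13, 8) == 8 and CEIL_ROUND (13, 8) == 16, while for
   negative values FLOOR_ROUND (-13, 8) == -16 and CEIL_ROUND (-13, 8)
   == -8.  This is why the masking trick is preferred over division:
   integer division would round -13 / 8 toward zero, giving -8 for the
   floor case.  */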
102
103 /* Nonzero once virtual register instantiation has been done.
104 assign_stack_local uses frame_pointer_rtx when this is nonzero.
105 calls.c:emit_library_call_value_1 uses it to set up
106 post-instantiation libcalls. */
107 int virtuals_instantiated;
108
109 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
110 static GTY(()) int funcdef_no;
111
112 /* These variables hold pointers to functions to create and destroy
113 target specific, per-function data structures. */
114 struct machine_function * (*init_machine_status) (void);
115
116 /* The currently compiled function. */
117 struct function *cfun = 0;
118
119 /* These hashes record the prologue and epilogue insns. */
120
121 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
122 {
123 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
124 static bool equal (rtx a, rtx b) { return a == b; }
125 };
126
127 static GTY((cache))
128 hash_table<insn_cache_hasher> *prologue_insn_hash;
129 static GTY((cache))
130 hash_table<insn_cache_hasher> *epilogue_insn_hash;
131
132
133 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
134 vec<tree, va_gc> *types_used_by_cur_var_decl;
135
136 /* Forward declarations. */
137
138 static struct temp_slot *find_temp_slot_from_address (rtx);
139 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
140 static void pad_below (struct args_size *, machine_mode, tree);
141 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
142 static int all_blocks (tree, tree *);
143 static tree *get_block_vector (tree, int *);
144 extern tree debug_find_var_in_block_tree (tree, tree);
145 /* We always define `record_insns' even if it's not used so that we
146 can always export `prologue_epilogue_contains'. */
147 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
148 ATTRIBUTE_UNUSED;
149 static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
150 static void prepare_function_start (void);
151 static void do_clobber_return_reg (rtx, void *);
152 static void do_use_return_reg (rtx, void *);
153
154
155 /* Stack of nested functions, used to keep track of the cfun stack. */
156
157
158 static vec<function *> function_context_stack;
159
160 /* Save the current context for compilation of a nested function.
161 This is called from language-specific code. */
162
163 void
164 push_function_context (void)
165 {
166 if (cfun == 0)
167 allocate_struct_function (NULL, false);
168
169 function_context_stack.safe_push (cfun);
170 set_cfun (NULL);
171 }
172
173 /* Restore the last saved context, at the end of a nested function.
174 This function is called from language-specific code. */
175
176 void
177 pop_function_context (void)
178 {
179 struct function *p = function_context_stack.pop ();
180 set_cfun (p);
181 current_function_decl = p->decl;
182
183 /* Reset variables that have known state during rtx generation. */
184 virtuals_instantiated = 0;
185 generating_concat_p = 1;
186 }
187
188 /* Clear out all parts of the state in F that can safely be discarded
189 after the function has been parsed, but not compiled, to let
190 garbage collection reclaim the memory. */
191
192 void
193 free_after_parsing (struct function *f)
194 {
195 f->language = 0;
196 }
197
198 /* Clear out all parts of the state in F that can safely be discarded
199 after the function has been compiled, to let garbage collection
200 reclaim the memory. */
201
202 void
203 free_after_compilation (struct function *f)
204 {
205 prologue_insn_hash = NULL;
206 epilogue_insn_hash = NULL;
207
208 free (crtl->emit.regno_pointer_align);
209
210 memset (crtl, 0, sizeof (struct rtl_data));
211 f->eh = NULL;
212 f->machine = NULL;
213 f->cfg = NULL;
214 f->curr_properties &= ~PROP_cfg;
215
216 regno_reg_rtx = NULL;
217 }
218
219 /* Return size needed for stack frame based on slots so far allocated.
220 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
221 the caller may have to do that. */
222
223 poly_int64
224 get_frame_size (void)
225 {
226 if (FRAME_GROWS_DOWNWARD)
227 return -frame_offset;
228 else
229 return frame_offset;
230 }
231
232 /* Issue an error message and return TRUE if frame OFFSET overflows in
233 the signed target pointer arithmetic for function FUNC. Otherwise
234 return FALSE. */
235
236 bool
237 frame_offset_overflow (poly_int64 offset, tree func)
238 {
239 poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
240 unsigned HOST_WIDE_INT limit
241 = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
242 /* Leave room for the fixed part of the frame. */
243 - 64 * UNITS_PER_WORD);
244
245 if (!coeffs_in_range_p (size, 0U, limit))
246 {
247 error_at (DECL_SOURCE_LOCATION (func),
248 "total size of local objects too large");
249 return true;
250 }
251
252 return false;
253 }
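/* For example, on a hypothetical 64-bit target with 8-byte words
   (GET_MODE_BITSIZE (Pmode) == 64, UNITS_PER_WORD == 8), the limit
   above works out to 2^63 - 512 bytes; a frame whose slots exceed
   that triggers the "total size of local objects too large"
   diagnostic.  */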
254
255 /* Return the minimum spill slot alignment for a register of mode MODE. */
256
257 unsigned int
258 spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
259 {
260 return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
261 }
262
263 /* Return stack slot alignment in bits for TYPE and MODE. */
264
265 static unsigned int
266 get_stack_local_alignment (tree type, machine_mode mode)
267 {
268 unsigned int alignment;
269
270 if (mode == BLKmode)
271 alignment = BIGGEST_ALIGNMENT;
272 else
273 alignment = GET_MODE_ALIGNMENT (mode);
274
275 /* Allow the front end to (possibly) increase the alignment of this
276 stack slot. */
277 if (! type)
278 type = lang_hooks.types.type_for_mode (mode, 0);
279
280 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
281 }
282
283 /* Determine whether it is possible to fit a stack slot of size SIZE and
284 alignment ALIGNMENT into an area in the stack frame that starts at
285 frame offset START and has a length of LENGTH. If so, store the frame
286 offset to be used for the stack slot in *POFFSET and return true;
287 return false otherwise. This function will extend the frame size when
288 given a start/length pair that lies at the end of the frame. */
289
290 static bool
291 try_fit_stack_local (poly_int64 start, poly_int64 length,
292 poly_int64 size, unsigned int alignment,
293 poly_int64_pod *poffset)
294 {
295 poly_int64 this_frame_offset;
296 int frame_off, frame_alignment, frame_phase;
297
298 /* Calculate how many bytes the start of local variables is off from
299 stack alignment. */
300 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
301 frame_off = targetm.starting_frame_offset () % frame_alignment;
302 frame_phase = frame_off ? frame_alignment - frame_off : 0;
303
304 /* Round the frame offset to the specified alignment. */
305
306 if (FRAME_GROWS_DOWNWARD)
307 this_frame_offset
308 = (aligned_lower_bound (start + length - size - frame_phase, alignment)
309 + frame_phase);
310 else
311 this_frame_offset
312 = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
313
314 /* See if it fits. If this space is at the edge of the frame,
315 consider extending the frame to make it fit. Our caller relies on
316 this when allocating a new slot. */
317 if (maybe_lt (this_frame_offset, start))
318 {
319 if (known_eq (frame_offset, start))
320 frame_offset = this_frame_offset;
321 else
322 return false;
323 }
324 else if (maybe_gt (this_frame_offset + size, start + length))
325 {
326 if (known_eq (frame_offset, start + length))
327 frame_offset = this_frame_offset + size;
328 else
329 return false;
330 }
331
332 *poffset = this_frame_offset;
333 return true;
334 }
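/* A worked example, assuming FRAME_GROWS_DOWNWARD and a zero frame
   phase: for start = -32, length = 16, size = 8 and alignment = 16,
   the offset is aligned_lower_bound (-32 + 16 - 8, 16) = -32, which
   lies within [start, start + length), so *poffset becomes -32 and
   the function returns true without extending the frame.  */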
335
336 /* Create a new frame_space structure describing free space in the stack
337 frame beginning at START and ending at END, and chain it into the
338 function's frame_space_list. */
339
340 static void
341 add_frame_space (poly_int64 start, poly_int64 end)
342 {
343 struct frame_space *space = ggc_alloc<frame_space> ();
344 space->next = crtl->frame_space_list;
345 crtl->frame_space_list = space;
346 space->start = start;
347 space->length = end - start;
348 }
349
350 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
351 with machine mode MODE.
352
353 ALIGN controls the amount of alignment for the address of the slot:
354 0 means according to MODE,
355 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
356 -2 means use BITS_PER_UNIT,
357 positive specifies alignment boundary in bits.
358
359 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
360 alignment and ASLK_RECORD_PAD bit set if we should remember
361 extra space we allocated for alignment purposes. When we are
362 called from assign_stack_temp_for_type, it is not set so we don't
363 track the same stack slot in two independent lists.
364
365 We do not round to stack_boundary here. */
366
367 rtx
368 assign_stack_local_1 (machine_mode mode, poly_int64 size,
369 int align, int kind)
370 {
371 rtx x, addr;
372 poly_int64 bigend_correction = 0;
373 poly_int64 slot_offset = 0, old_frame_offset;
374 unsigned int alignment, alignment_in_bits;
375
376 if (align == 0)
377 {
378 alignment = get_stack_local_alignment (NULL, mode);
379 alignment /= BITS_PER_UNIT;
380 }
381 else if (align == -1)
382 {
383 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
384 size = aligned_upper_bound (size, alignment);
385 }
386 else if (align == -2)
387 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
388 else
389 alignment = align / BITS_PER_UNIT;
390
391 alignment_in_bits = alignment * BITS_PER_UNIT;
392
393 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
394 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
395 {
396 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
397 alignment = alignment_in_bits / BITS_PER_UNIT;
398 }
399
400 if (SUPPORTS_STACK_ALIGNMENT)
401 {
402 if (crtl->stack_alignment_estimated < alignment_in_bits)
403 {
404 if (!crtl->stack_realign_processed)
405 crtl->stack_alignment_estimated = alignment_in_bits;
406 else
407 {
408 /* If stack is realigned and stack alignment value
409 hasn't been finalized, it is OK not to increase
410 stack_alignment_estimated. The bigger alignment
411 requirement is recorded in stack_alignment_needed
412 below. */
413 gcc_assert (!crtl->stack_realign_finalized);
414 if (!crtl->stack_realign_needed)
415 {
416 /* It is OK to reduce the alignment as long as the
417 requested size is 0 or the estimated stack
418 alignment >= mode alignment. */
419 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
420 || known_eq (size, 0)
421 || (crtl->stack_alignment_estimated
422 >= GET_MODE_ALIGNMENT (mode)));
423 alignment_in_bits = crtl->stack_alignment_estimated;
424 alignment = alignment_in_bits / BITS_PER_UNIT;
425 }
426 }
427 }
428 }
429
430 if (crtl->stack_alignment_needed < alignment_in_bits)
431 crtl->stack_alignment_needed = alignment_in_bits;
432 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
433 crtl->max_used_stack_slot_alignment = alignment_in_bits;
434
435 if (mode != BLKmode || maybe_ne (size, 0))
436 {
437 if (kind & ASLK_RECORD_PAD)
438 {
439 struct frame_space **psp;
440
441 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
442 {
443 struct frame_space *space = *psp;
444 if (!try_fit_stack_local (space->start, space->length, size,
445 alignment, &slot_offset))
446 continue;
447 *psp = space->next;
448 if (known_gt (slot_offset, space->start))
449 add_frame_space (space->start, slot_offset);
450 if (known_lt (slot_offset + size, space->start + space->length))
451 add_frame_space (slot_offset + size,
452 space->start + space->length);
453 goto found_space;
454 }
455 }
456 }
457 else if (!STACK_ALIGNMENT_NEEDED)
458 {
459 slot_offset = frame_offset;
460 goto found_space;
461 }
462
463 old_frame_offset = frame_offset;
464
465 if (FRAME_GROWS_DOWNWARD)
466 {
467 frame_offset -= size;
468 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
469
470 if (kind & ASLK_RECORD_PAD)
471 {
472 if (known_gt (slot_offset, frame_offset))
473 add_frame_space (frame_offset, slot_offset);
474 if (known_lt (slot_offset + size, old_frame_offset))
475 add_frame_space (slot_offset + size, old_frame_offset);
476 }
477 }
478 else
479 {
480 frame_offset += size;
481 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
482
483 if (kind & ASLK_RECORD_PAD)
484 {
485 if (known_gt (slot_offset, old_frame_offset))
486 add_frame_space (old_frame_offset, slot_offset);
487 if (known_lt (slot_offset + size, frame_offset))
488 add_frame_space (slot_offset + size, frame_offset);
489 }
490 }
491
492 found_space:
493 /* On a big-endian machine, if we are allocating more space than we will use,
494 use the least significant bytes of those that are allocated. */
495 if (mode != BLKmode)
496 {
497 /* The slot size can sometimes be smaller than the mode size;
498 e.g. the rs6000 port allocates slots with a vector mode
499 that have the size of only one element. However, the slot
500 size must always be ordered wrt to the mode size, in the
501 same way as for a subreg. */
502 gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
503 if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
504 bigend_correction = size - GET_MODE_SIZE (mode);
505 }
506
507 /* If we have already instantiated virtual registers, return the actual
508 address relative to the frame pointer. */
509 if (virtuals_instantiated)
510 addr = plus_constant (Pmode, frame_pointer_rtx,
511 trunc_int_for_mode
512 (slot_offset + bigend_correction
513 + targetm.starting_frame_offset (), Pmode));
514 else
515 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
516 trunc_int_for_mode
517 (slot_offset + bigend_correction,
518 Pmode));
519
520 x = gen_rtx_MEM (mode, addr);
521 set_mem_align (x, alignment_in_bits);
522 MEM_NOTRAP_P (x) = 1;
523
524 vec_safe_push (stack_slot_list, x);
525
526 if (frame_offset_overflow (frame_offset, current_function_decl))
527 frame_offset = 0;
528
529 return x;
530 }
531
532 /* Wrap up assign_stack_local_1 with the last parameter as ASLK_RECORD_PAD. */
533
534 rtx
535 assign_stack_local (machine_mode mode, poly_int64 size, int align)
536 {
537 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
538 }
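/* Typical uses, as a sketch of what the code above produces:
   assign_stack_local (SImode, 4, 0) returns something like
   (mem:SI (plus (reg virtual-stack-vars) (const_int OFF))), aligned
   for SImode, while assign_stack_local (BLKmode, size, -1) requests
   BIGGEST_ALIGNMENT and rounds SIZE up to a multiple of it.  OFF here
   is a placeholder for whatever frame offset the allocator picked.  */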
539
540 /* In order to evaluate some expressions, such as function calls returning
541 structures in memory, we need to temporarily allocate stack locations.
542 We record each allocated temporary in the following structure.
543
544 Associated with each temporary slot is a nesting level. When we pop up
545 one level, all temporaries associated with the previous level are freed.
546 Normally, all temporaries are freed after the execution of the statement
547 in which they were created. However, if we are inside a ({...}) grouping,
548 the result may be in a temporary and hence must be preserved. If the
549 result could be in a temporary, we preserve it if we can determine which
550 one it is in. If we cannot determine which temporary may contain the
551 result, all temporaries are preserved. A temporary is preserved by
552 pretending it was allocated at the previous nesting level. */
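/* A sketch of the usual nesting discipline when expanding a statement,
   using the real entry points defined below (the exact sequence is
   illustrative):

     push_temp_slots ();
     rtx tmp = assign_stack_temp (DImode, 8);
     ... emit code that stores into and reads from TMP ...
     preserve_temp_slots (result);   (only if RESULT may live in a temp)
     pop_temp_slots ();              (frees the remaining temporaries)  */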
553
554 struct GTY(()) temp_slot {
555 /* Points to next temporary slot. */
556 struct temp_slot *next;
557 /* Points to previous temporary slot. */
558 struct temp_slot *prev;
559 /* The rtx used to reference the slot. */
560 rtx slot;
561 /* The size, in units, of the slot. */
562 poly_int64 size;
563 /* The type of the object in the slot, or zero if it doesn't correspond
564 to a type. We use this to determine whether a slot can be reused.
565 It can be reused if objects of the type of the new slot will always
566 conflict with objects of the type of the old slot. */
567 tree type;
568 /* The alignment (in bits) of the slot. */
569 unsigned int align;
570 /* Nonzero if this temporary is currently in use. */
571 char in_use;
572 /* Nesting level at which this slot is being used. */
573 int level;
574 /* The offset of the slot from the frame_pointer, including extra space
575 for alignment. This info is for combine_temp_slots. */
576 poly_int64 base_offset;
577 /* The size of the slot, including extra space for alignment. This
578 info is for combine_temp_slots. */
579 poly_int64 full_size;
580 };
581
582 /* Entry for the below hash table. */
583 struct GTY((for_user)) temp_slot_address_entry {
584 hashval_t hash;
585 rtx address;
586 struct temp_slot *temp_slot;
587 };
588
589 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
590 {
591 static hashval_t hash (temp_slot_address_entry *);
592 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
593 };
594
595 /* A table of addresses that represent a stack slot. The table is a mapping
596 from address RTXen to a temp slot. */
597 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
598 static size_t n_temp_slots_in_use;
599
600 /* Removes temporary slot TEMP from LIST. */
601
602 static void
603 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
604 {
605 if (temp->next)
606 temp->next->prev = temp->prev;
607 if (temp->prev)
608 temp->prev->next = temp->next;
609 else
610 *list = temp->next;
611
612 temp->prev = temp->next = NULL;
613 }
614
615 /* Inserts temporary slot TEMP into LIST. */
616
617 static void
618 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
619 {
620 temp->next = *list;
621 if (*list)
622 (*list)->prev = temp;
623 temp->prev = NULL;
624 *list = temp;
625 }
626
627 /* Returns the list of used temp slots at LEVEL. */
628
629 static struct temp_slot **
630 temp_slots_at_level (int level)
631 {
632 if (level >= (int) vec_safe_length (used_temp_slots))
633 vec_safe_grow_cleared (used_temp_slots, level + 1);
634
635 return &(*used_temp_slots)[level];
636 }
637
638 /* Returns the maximal temporary slot level. */
639
640 static int
641 max_slot_level (void)
642 {
643 if (!used_temp_slots)
644 return -1;
645
646 return used_temp_slots->length () - 1;
647 }
648
649 /* Moves temporary slot TEMP to LEVEL. */
650
651 static void
652 move_slot_to_level (struct temp_slot *temp, int level)
653 {
654 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
655 insert_slot_to_list (temp, temp_slots_at_level (level));
656 temp->level = level;
657 }
658
659 /* Make temporary slot TEMP available. */
660
661 static void
662 make_slot_available (struct temp_slot *temp)
663 {
664 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
665 insert_slot_to_list (temp, &avail_temp_slots);
666 temp->in_use = 0;
667 temp->level = -1;
668 n_temp_slots_in_use--;
669 }
670
671 /* Compute the hash value for an address -> temp slot mapping.
672 The value is cached on the mapping entry. */
673 static hashval_t
674 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
675 {
676 int do_not_record = 0;
677 return hash_rtx (t->address, GET_MODE (t->address),
678 &do_not_record, NULL, false);
679 }
680
681 /* Return the hash value for an address -> temp slot mapping. */
682 hashval_t
683 temp_address_hasher::hash (temp_slot_address_entry *t)
684 {
685 return t->hash;
686 }
687
688 /* Compare two address -> temp slot mapping entries. */
689 bool
690 temp_address_hasher::equal (temp_slot_address_entry *t1,
691 temp_slot_address_entry *t2)
692 {
693 return exp_equiv_p (t1->address, t2->address, 0, true);
694 }
695
696 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
697 static void
698 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
699 {
700 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
701 t->address = address;
702 t->temp_slot = temp_slot;
703 t->hash = temp_slot_address_compute_hash (t);
704 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
705 }
706
707 /* Remove an address -> temp slot mapping entry if the temp slot is
708 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
709 int
710 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
711 {
712 const struct temp_slot_address_entry *t = *slot;
713 if (! t->temp_slot->in_use)
714 temp_slot_address_table->clear_slot (slot);
715 return 1;
716 }
717
718 /* Remove all mappings of addresses to unused temp slots. */
719 static void
720 remove_unused_temp_slot_addresses (void)
721 {
722 /* Use quicker clearing if there aren't any active temp slots. */
723 if (n_temp_slots_in_use)
724 temp_slot_address_table->traverse
725 <void *, remove_unused_temp_slot_addresses_1> (NULL);
726 else
727 temp_slot_address_table->empty ();
728 }
729
730 /* Find the temp slot corresponding to the object at address X. */
731
732 static struct temp_slot *
733 find_temp_slot_from_address (rtx x)
734 {
735 struct temp_slot *p;
736 struct temp_slot_address_entry tmp, *t;
737
738 /* First try the easy way:
739 See if X exists in the address -> temp slot mapping. */
740 tmp.address = x;
741 tmp.temp_slot = NULL;
742 tmp.hash = temp_slot_address_compute_hash (&tmp);
743 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
744 if (t)
745 return t->temp_slot;
746
747 /* If we have a sum involving a register, see if it points to a temp
748 slot. */
749 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
750 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
751 return p;
752 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
753 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
754 return p;
755
756 /* Last resort: Address is a virtual stack var address. */
757 poly_int64 offset;
758 if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
759 {
760 int i;
761 for (i = max_slot_level (); i >= 0; i--)
762 for (p = *temp_slots_at_level (i); p; p = p->next)
763 if (known_in_range_p (offset, p->base_offset, p->full_size))
764 return p;
765 }
766
767 return NULL;
768 }
769
770 /* Allocate a temporary stack slot and record it for possible later
771 reuse.
772
773 MODE is the machine mode to be given to the returned rtx.
774
775 SIZE is the size in units of the space required. We do no rounding here
776 since assign_stack_local will do any required rounding.
777
778 TYPE is the type that will be used for the stack slot. */
779
780 rtx
781 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
782 {
783 unsigned int align;
784 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
785 rtx slot;
786
787 gcc_assert (known_size_p (size));
788
789 align = get_stack_local_alignment (type, mode);
790
791 /* Try to find an available, already-allocated temporary of the proper
792 mode which meets the size and alignment requirements. Choose the
793 smallest one with the closest alignment.
794
795 If assign_stack_temp is called outside of the tree->rtl expansion,
796 we cannot reuse the stack slots (that may still refer to
797 VIRTUAL_STACK_VARS_REGNUM). */
798 if (!virtuals_instantiated)
799 {
800 for (p = avail_temp_slots; p; p = p->next)
801 {
802 if (p->align >= align
803 && known_ge (p->size, size)
804 && GET_MODE (p->slot) == mode
805 && objects_must_conflict_p (p->type, type)
806 && (best_p == 0
807 || (known_eq (best_p->size, p->size)
808 ? best_p->align > p->align
809 : known_ge (best_p->size, p->size))))
810 {
811 if (p->align == align && known_eq (p->size, size))
812 {
813 selected = p;
814 cut_slot_from_list (selected, &avail_temp_slots);
815 best_p = 0;
816 break;
817 }
818 best_p = p;
819 }
820 }
821 }
822
823 /* Make our best, if any, the one to use. */
824 if (best_p)
825 {
826 selected = best_p;
827 cut_slot_from_list (selected, &avail_temp_slots);
828
829 /* If there are enough aligned bytes left over, make them into a new
830 temp_slot so that the extra bytes don't get wasted. Do this only
831 for BLKmode slots, so that we can be sure of the alignment. */
832 if (GET_MODE (best_p->slot) == BLKmode)
833 {
834 int alignment = best_p->align / BITS_PER_UNIT;
835 poly_int64 rounded_size = aligned_upper_bound (size, alignment);
836
837 if (known_ge (best_p->size - rounded_size, alignment))
838 {
839 p = ggc_alloc<temp_slot> ();
840 p->in_use = 0;
841 p->size = best_p->size - rounded_size;
842 p->base_offset = best_p->base_offset + rounded_size;
843 p->full_size = best_p->full_size - rounded_size;
844 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
845 p->align = best_p->align;
846 p->type = best_p->type;
847 insert_slot_to_list (p, &avail_temp_slots);
848
849 vec_safe_push (stack_slot_list, p->slot);
850
851 best_p->size = rounded_size;
852 best_p->full_size = rounded_size;
853 }
854 }
855 }
856
857 /* If we still didn't find one, make a new temporary. */
858 if (selected == 0)
859 {
860 poly_int64 frame_offset_old = frame_offset;
861
862 p = ggc_alloc<temp_slot> ();
863
864 /* We are passing an explicit alignment request to assign_stack_local.
865 One side effect of that is assign_stack_local will not round SIZE
866 to ensure the frame offset remains suitably aligned.
867
868 So for requests which depended on the rounding of SIZE, we go ahead
869 and round it now. We also make sure ALIGNMENT is at least
870 BIGGEST_ALIGNMENT. */
871 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
872 p->slot = assign_stack_local_1 (mode,
873 (mode == BLKmode
874 ? aligned_upper_bound (size,
875 (int) align
876 / BITS_PER_UNIT)
877 : size),
878 align, 0);
879
880 p->align = align;
881
882 /* The following slot size computation is necessary because we don't
883 know the actual size of the temporary slot until assign_stack_local
884 has performed all the frame alignment and size rounding for the
885 requested temporary. Note that extra space added for alignment
886 can be either above or below this stack slot depending on which
887 way the frame grows. We include the extra space if and only if it
888 is above this slot. */
889 if (FRAME_GROWS_DOWNWARD)
890 p->size = frame_offset_old - frame_offset;
891 else
892 p->size = size;
893
894 /* Now define the fields used by combine_temp_slots. */
895 if (FRAME_GROWS_DOWNWARD)
896 {
897 p->base_offset = frame_offset;
898 p->full_size = frame_offset_old - frame_offset;
899 }
900 else
901 {
902 p->base_offset = frame_offset_old;
903 p->full_size = frame_offset - frame_offset_old;
904 }
905
906 selected = p;
907 }
908
909 p = selected;
910 p->in_use = 1;
911 p->type = type;
912 p->level = temp_slot_level;
913 n_temp_slots_in_use++;
914
915 pp = temp_slots_at_level (p->level);
916 insert_slot_to_list (p, pp);
917 insert_temp_slot_address (XEXP (p->slot, 0), p);
918
919 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
920 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
921 vec_safe_push (stack_slot_list, slot);
922
923 /* If we know the alias set for the memory that will be used, use
924 it. If there's no TYPE, then we don't know anything about the
925 alias set for the memory. */
926 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
927 set_mem_align (slot, align);
928
929 /* If a type is specified, set the relevant flags. */
930 if (type != 0)
931 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
932 MEM_NOTRAP_P (slot) = 1;
933
934 return slot;
935 }
936
937 /* Allocate a temporary stack slot and record it for possible later
938 reuse. First two arguments are same as in preceding function. */
939
940 rtx
941 assign_stack_temp (machine_mode mode, poly_int64 size)
942 {
943 return assign_stack_temp_for_type (mode, size, NULL_TREE);
944 }
945
946 /* Assign a temporary.
947 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
948 whose name should be used in error messages. In either case, we
949 allocate space of the given type.
950 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
951 it is 0 if a register is OK.
952 DONT_PROMOTE is 1 if we should not promote values in register
953 to wider modes. */
954
955 rtx
956 assign_temp (tree type_or_decl, int memory_required,
957 int dont_promote ATTRIBUTE_UNUSED)
958 {
959 tree type, decl;
960 machine_mode mode;
961 #ifdef PROMOTE_MODE
962 int unsignedp;
963 #endif
964
965 if (DECL_P (type_or_decl))
966 decl = type_or_decl, type = TREE_TYPE (decl);
967 else
968 decl = NULL, type = type_or_decl;
969
970 mode = TYPE_MODE (type);
971 #ifdef PROMOTE_MODE
972 unsignedp = TYPE_UNSIGNED (type);
973 #endif
974
975 /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
976 end. See also create_tmp_var for the gimplification-time check. */
977 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
978
979 if (mode == BLKmode || memory_required)
980 {
981 HOST_WIDE_INT size = int_size_in_bytes (type);
982 rtx tmp;
983
984 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
985 problems with allocating the stack space. */
986 if (size == 0)
987 size = 1;
988
989 /* Unfortunately, we don't yet know how to allocate variable-sized
990 temporaries. However, sometimes we can find a fixed upper limit on
991 the size, so try that instead. */
992 else if (size == -1)
993 size = max_int_size_in_bytes (type);
994
995 /* The size of the temporary may be too large to fit into an integer. */
996 /* ??? Not sure this should happen except for user silliness, so limit
997 this to things that aren't compiler-generated temporaries. The
998 rest of the time we'll die in assign_stack_temp_for_type. */
999 if (decl && size == -1
1000 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1001 {
1002 error ("size of variable %q+D is too large", decl);
1003 size = 1;
1004 }
1005
1006 tmp = assign_stack_temp_for_type (mode, size, type);
1007 return tmp;
1008 }
1009
1010 #ifdef PROMOTE_MODE
1011 if (! dont_promote)
1012 mode = promote_mode (type, mode, &unsignedp);
1013 #endif
1014
1015 return gen_reg_rtx (mode);
1016 }
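/* For instance, assign_temp (type, 1, 0) for an aggregate TYPE yields
   an addressable stack MEM sized by int_size_in_bytes (type), whereas
   for a scalar type with MEMORY_REQUIRED == 0 it simply returns a
   fresh pseudo register, possibly promoted to a wider mode when
   PROMOTE_MODE is defined.  This merely restates the behavior of the
   code above, not an additional contract.  */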
1017
1018 /* Combine temporary stack slots which are adjacent on the stack.
1019
1020 This allows for better use of already allocated stack space. This is only
1021 done for BLKmode slots because we can be sure that we won't have alignment
1022 problems in this case. */
1023
1024 static void
1025 combine_temp_slots (void)
1026 {
1027 struct temp_slot *p, *q, *next, *next_q;
1028 int num_slots;
1029
1030 /* We can't combine slots, because the information about which slot
1031 is in which alias set will be lost. */
1032 if (flag_strict_aliasing)
1033 return;
1034
1035 /* If there are a lot of temp slots, don't do anything unless
1036 we are at high levels of optimization. */
1037 if (! flag_expensive_optimizations)
1038 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1039 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1040 return;
1041
1042 for (p = avail_temp_slots; p; p = next)
1043 {
1044 int delete_p = 0;
1045
1046 next = p->next;
1047
1048 if (GET_MODE (p->slot) != BLKmode)
1049 continue;
1050
1051 for (q = p->next; q; q = next_q)
1052 {
1053 int delete_q = 0;
1054
1055 next_q = q->next;
1056
1057 if (GET_MODE (q->slot) != BLKmode)
1058 continue;
1059
1060 if (known_eq (p->base_offset + p->full_size, q->base_offset))
1061 {
1062 /* Q comes after P; combine Q into P. */
1063 p->size += q->size;
1064 p->full_size += q->full_size;
1065 delete_q = 1;
1066 }
1067 else if (known_eq (q->base_offset + q->full_size, p->base_offset))
1068 {
1069 /* P comes after Q; combine P into Q. */
1070 q->size += p->size;
1071 q->full_size += p->full_size;
1072 delete_p = 1;
1073 break;
1074 }
1075 if (delete_q)
1076 cut_slot_from_list (q, &avail_temp_slots);
1077 }
1078
1079 /* Either delete P or advance past it. */
1080 if (delete_p)
1081 cut_slot_from_list (p, &avail_temp_slots);
1082 }
1083 }
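/* Example of the combination step: two free BLKmode slots P and Q with
   P->base_offset == 0, P->full_size == 16 and Q->base_offset == 16,
   Q->full_size == 8 are adjacent, so Q is folded into P, leaving one
   free slot covering 24 bytes.  */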
1084
1085 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1086 slot that previously was known by OLD_RTX. */
1087
1088 void
1089 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1090 {
1091 struct temp_slot *p;
1092
1093 if (rtx_equal_p (old_rtx, new_rtx))
1094 return;
1095
1096 p = find_temp_slot_from_address (old_rtx);
1097
1098 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1099 NEW_RTX is a register, see if one operand of the PLUS is a
1100 temporary location; if so, NEW_RTX points into it. Otherwise,
1101 see if both OLD_RTX and NEW_RTX are a PLUS with a register
1102 in common between them; if so, try a recursive call on the
1103 remaining operands. */
1104 if (p == 0)
1105 {
1106 if (GET_CODE (old_rtx) != PLUS)
1107 return;
1108
1109 if (REG_P (new_rtx))
1110 {
1111 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1112 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1113 return;
1114 }
1115 else if (GET_CODE (new_rtx) != PLUS)
1116 return;
1117
1118 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1119 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1120 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1121 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1122 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1123 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1124 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1125 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1126
1127 return;
1128 }
1129
1130 /* Otherwise add an alias for the temp's address. */
1131 insert_temp_slot_address (new_rtx, p);
1132 }
1133
1134 /* If X could be a reference to a temporary slot, mark that slot as
1135 belonging to the level one higher than the current level. If X
1136 matched one of our slots, just mark that one. Otherwise, we can't
1137 easily predict which it is, so upgrade all of them.
1138
1139 This is called when an ({...}) construct occurs and a statement
1140 returns a value in memory. */
1141
1142 void
1143 preserve_temp_slots (rtx x)
1144 {
1145 struct temp_slot *p = 0, *next;
1146
1147 if (x == 0)
1148 return;
1149
1150 /* If X is a register that is being used as a pointer, see if we have
1151 a temporary slot we know it points to. */
1152 if (REG_P (x) && REG_POINTER (x))
1153 p = find_temp_slot_from_address (x);
1154
1155 /* If X is not in memory or is at a constant address, it cannot be in
1156 a temporary slot. */
1157 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1158 return;
1159
1160 /* First see if we can find a match. */
1161 if (p == 0)
1162 p = find_temp_slot_from_address (XEXP (x, 0));
1163
1164 if (p != 0)
1165 {
1166 if (p->level == temp_slot_level)
1167 move_slot_to_level (p, temp_slot_level - 1);
1168 return;
1169 }
1170
1171 /* Otherwise, preserve all non-kept slots at this level. */
1172 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1173 {
1174 next = p->next;
1175 move_slot_to_level (p, temp_slot_level - 1);
1176 }
1177 }
1178
1179 /* Free all temporaries used so far. This is normally called at the
1180 end of generating code for a statement. */
1181
1182 void
1183 free_temp_slots (void)
1184 {
1185 struct temp_slot *p, *next;
1186 bool some_available = false;
1187
1188 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1189 {
1190 next = p->next;
1191 make_slot_available (p);
1192 some_available = true;
1193 }
1194
1195 if (some_available)
1196 {
1197 remove_unused_temp_slot_addresses ();
1198 combine_temp_slots ();
1199 }
1200 }
1201
1202 /* Push deeper into the nesting level for stack temporaries. */
1203
1204 void
1205 push_temp_slots (void)
1206 {
1207 temp_slot_level++;
1208 }
1209
1210 /* Pop a temporary nesting level. All slots in use in the current level
1211 are freed. */
1212
1213 void
1214 pop_temp_slots (void)
1215 {
1216 free_temp_slots ();
1217 temp_slot_level--;
1218 }
1219
1220 /* Initialize temporary slots. */
1221
1222 void
1223 init_temp_slots (void)
1224 {
1225 /* We have not allocated any temporaries yet. */
1226 avail_temp_slots = 0;
1227 vec_alloc (used_temp_slots, 0);
1228 temp_slot_level = 0;
1229 n_temp_slots_in_use = 0;
1230
1231 /* Set up the table to map addresses to temp slots. */
1232 if (! temp_slot_address_table)
1233 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1234 else
1235 temp_slot_address_table->empty ();
1236 }
1237
1238 /* Functions and data structures to keep track of the values hard regs
1239 had at the start of the function. */
1240
1241 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1242 and has_hard_reg_initial_val. */
1243 struct GTY(()) initial_value_pair {
1244 rtx hard_reg;
1245 rtx pseudo;
1246 };
1247 /* ??? This could be a VEC but there is currently no way to define an
1248 opaque VEC type. This could be worked around by defining struct
1249 initial_value_pair in function.h. */
1250 struct GTY(()) initial_value_struct {
1251 int num_entries;
1252 int max_entries;
1253 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1254 };
1255
1256 /* If a pseudo represents an initial hard reg (or expression), return
1257 it, else return NULL_RTX. */
1258
1259 rtx
1260 get_hard_reg_initial_reg (rtx reg)
1261 {
1262 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1263 int i;
1264
1265 if (ivs == 0)
1266 return NULL_RTX;
1267
1268 for (i = 0; i < ivs->num_entries; i++)
1269 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1270 return ivs->entries[i].hard_reg;
1271
1272 return NULL_RTX;
1273 }
1274
1275 /* Make sure that there's a pseudo register of mode MODE that stores the
1276 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1277
1278 rtx
1279 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1280 {
1281 struct initial_value_struct *ivs;
1282 rtx rv;
1283
1284 rv = has_hard_reg_initial_val (mode, regno);
1285 if (rv)
1286 return rv;
1287
1288 ivs = crtl->hard_reg_initial_vals;
1289 if (ivs == 0)
1290 {
1291 ivs = ggc_alloc<initial_value_struct> ();
1292 ivs->num_entries = 0;
1293 ivs->max_entries = 5;
1294 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1295 crtl->hard_reg_initial_vals = ivs;
1296 }
1297
1298 if (ivs->num_entries >= ivs->max_entries)
1299 {
1300 ivs->max_entries += 5;
1301 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1302 ivs->max_entries);
1303 }
1304
1305 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1306 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1307
1308 return ivs->entries[ivs->num_entries++].pseudo;
1309 }
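/* A typical (illustrative) use is capturing the value an incoming hard
   register had at function entry, e.g.

     rtx v = get_hard_reg_initial_val (Pmode, REGNO (some_hard_reg));

   after which V can be used anywhere in the function, with
   emit_initial_value_sets below materializing the copy at entry.
   SOME_HARD_REG is a placeholder for this sketch, not a real GCC
   entity.  */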
1310
1311 /* See if get_hard_reg_initial_val has been used to create a pseudo
1312 for the initial value of hard register REGNO in mode MODE. Return
1313 the associated pseudo if so, otherwise return NULL. */
1314
1315 rtx
1316 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1317 {
1318 struct initial_value_struct *ivs;
1319 int i;
1320
1321 ivs = crtl->hard_reg_initial_vals;
1322 if (ivs != 0)
1323 for (i = 0; i < ivs->num_entries; i++)
1324 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1325 && REGNO (ivs->entries[i].hard_reg) == regno)
1326 return ivs->entries[i].pseudo;
1327
1328 return NULL_RTX;
1329 }
1330
1331 unsigned int
1332 emit_initial_value_sets (void)
1333 {
1334 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1335 int i;
1336 rtx_insn *seq;
1337
1338 if (ivs == 0)
1339 return 0;
1340
1341 start_sequence ();
1342 for (i = 0; i < ivs->num_entries; i++)
1343 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1344 seq = get_insns ();
1345 end_sequence ();
1346
1347 emit_insn_at_entry (seq);
1348 return 0;
1349 }
1350
1351 /* Store the hardreg-pseudoreg initial values pair for entry I in *HREG
1352 and *PREG, and return TRUE if I is a valid entry, FALSE otherwise. */
1353 bool
1354 initial_value_entry (int i, rtx *hreg, rtx *preg)
1355 {
1356 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1357 if (!ivs || i >= ivs->num_entries)
1358 return false;
1359
1360 *hreg = ivs->entries[i].hard_reg;
1361 *preg = ivs->entries[i].pseudo;
1362 return true;
1363 }
1364
1365 /* These routines are responsible for converting virtual register references
1366 to the actual hard register references once RTL generation is complete.
1367
1368 The following four variables are used for communication between the
1369 routines. They contain the offsets of the virtual registers from their
1370 respective hard registers. */
1371
1372 static poly_int64 in_arg_offset;
1373 static poly_int64 var_offset;
1374 static poly_int64 dynamic_offset;
1375 static poly_int64 out_arg_offset;
1376 static poly_int64 cfa_offset;
1377
1378 /* In most machines, the stack pointer register is equivalent to the bottom
1379 of the stack. */
1380
1381 #ifndef STACK_POINTER_OFFSET
1382 #define STACK_POINTER_OFFSET 0
1383 #endif
1384
1385 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1386 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1387 #endif
1388
1389 /* If not defined, pick an appropriate default for the offset of dynamically
1390 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1391 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1392
1393 #ifndef STACK_DYNAMIC_OFFSET
1394
1395 /* The bottom of the stack points to the actual arguments. If
1396 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1397 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1398 stack space for register parameters is not pushed by the caller, but
1399 rather is part of the fixed stack areas and hence not included in
1400 `crtl->outgoing_args_size'. Nevertheless, we must allow
1401 for it when allocating dynamic stack objects. */
1402
1403 #ifdef INCOMING_REG_PARM_STACK_SPACE
1404 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1405 ((ACCUMULATE_OUTGOING_ARGS \
1406 ? (crtl->outgoing_args_size \
1407 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1408 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1409 : 0) + (STACK_POINTER_OFFSET))
1410 #else
1411 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1412 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
1413 + (STACK_POINTER_OFFSET))
1414 #endif
1415 #endif
1416
1417
1418 /* Given a piece of RTX and a pointer to a poly_int64, if the RTX
1419 is a virtual register, return the equivalent hard register and set the
1420 offset indirectly through the pointer. Otherwise, return 0. */
1421
1422 static rtx
1423 instantiate_new_reg (rtx x, poly_int64_pod *poffset)
1424 {
1425 rtx new_rtx;
1426 poly_int64 offset;
1427
1428 if (x == virtual_incoming_args_rtx)
1429 {
1430 if (stack_realign_drap)
1431 {
1432 /* Replace virtual_incoming_args_rtx with internal arg
1433 pointer if DRAP is used to realign stack. */
1434 new_rtx = crtl->args.internal_arg_pointer;
1435 offset = 0;
1436 }
1437 else
1438 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1439 }
1440 else if (x == virtual_stack_vars_rtx)
1441 new_rtx = frame_pointer_rtx, offset = var_offset;
1442 else if (x == virtual_stack_dynamic_rtx)
1443 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1444 else if (x == virtual_outgoing_args_rtx)
1445 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1446 else if (x == virtual_cfa_rtx)
1447 {
1448 #ifdef FRAME_POINTER_CFA_OFFSET
1449 new_rtx = frame_pointer_rtx;
1450 #else
1451 new_rtx = arg_pointer_rtx;
1452 #endif
1453 offset = cfa_offset;
1454 }
1455 else if (x == virtual_preferred_stack_boundary_rtx)
1456 {
1457 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1458 offset = 0;
1459 }
1460 else
1461 return NULL_RTX;
1462
1463 *poffset = offset;
1464 return new_rtx;
1465 }
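/* For example, once VAR_OFFSET has been computed, an address such as
   (plus (reg virtual-stack-vars) (const_int 8)) is rewritten by the
   callers below into (plus (reg frame-pointer) (const_int C)), where
   C folds together 8 and VAR_OFFSET.  */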
1466
1467 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1468 registers present inside of *LOC. The expression is simplified,
1469 as much as possible, but is not to be considered "valid" in any sense
1470 implied by the target. Return true if any change is made. */
1471
1472 static bool
1473 instantiate_virtual_regs_in_rtx (rtx *loc)
1474 {
1475 if (!*loc)
1476 return false;
1477 bool changed = false;
1478 subrtx_ptr_iterator::array_type array;
1479 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1480 {
1481 rtx *loc = *iter;
1482 if (rtx x = *loc)
1483 {
1484 rtx new_rtx;
1485 poly_int64 offset;
1486 switch (GET_CODE (x))
1487 {
1488 case REG:
1489 new_rtx = instantiate_new_reg (x, &offset);
1490 if (new_rtx)
1491 {
1492 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1493 changed = true;
1494 }
1495 iter.skip_subrtxes ();
1496 break;
1497
1498 case PLUS:
1499 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1500 if (new_rtx)
1501 {
1502 XEXP (x, 0) = new_rtx;
1503 *loc = plus_constant (GET_MODE (x), x, offset, true);
1504 changed = true;
1505 iter.skip_subrtxes ();
1506 break;
1507 }
1508
1509 /* FIXME -- from old code */
1510 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1511 we can commute the PLUS and SUBREG because pointers into the
1512 frame are well-behaved. */
1513 break;
1514
1515 default:
1516 break;
1517 }
1518 }
1519 }
1520 return changed;
1521 }
1522
1523 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1524 matches the predicate for insn CODE operand OPERAND. */
1525
1526 static int
1527 safe_insn_predicate (int code, int operand, rtx x)
1528 {
1529 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1530 }
1531
1532 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1533 registers present inside of insn. The result will be a valid insn. */
1534
1535 static void
1536 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1537 {
1538 poly_int64 offset;
1539 int insn_code, i;
1540 bool any_change = false;
1541 rtx set, new_rtx, x;
1542 rtx_insn *seq;
1543
1544 /* There are some special cases to be handled first. */
1545 set = single_set (insn);
1546 if (set)
1547 {
1548 /* We're allowed to assign to a virtual register. This is interpreted
1549 to mean that the underlying register gets assigned the inverse
1550 transformation. This is used, for example, in the handling of
1551 non-local gotos. */
1552 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1553 if (new_rtx)
1554 {
1555 start_sequence ();
1556
1557 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1558 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1559 gen_int_mode (-offset, GET_MODE (new_rtx)));
1560 x = force_operand (x, new_rtx);
1561 if (x != new_rtx)
1562 emit_move_insn (new_rtx, x);
1563
1564 seq = get_insns ();
1565 end_sequence ();
1566
1567 emit_insn_before (seq, insn);
1568 delete_insn (insn);
1569 return;
1570 }
1571
1572 /* Handle a straight copy from a virtual register by generating a
1573 new add insn. The difference between this and falling through
1574 to the generic case is avoiding a new pseudo and eliminating a
1575 move insn in the initial rtl stream. */
1576 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1577 if (new_rtx
1578 && maybe_ne (offset, 0)
1579 && REG_P (SET_DEST (set))
1580 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1581 {
1582 start_sequence ();
1583
1584 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1585 gen_int_mode (offset,
1586 GET_MODE (SET_DEST (set))),
1587 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1588 if (x != SET_DEST (set))
1589 emit_move_insn (SET_DEST (set), x);
1590
1591 seq = get_insns ();
1592 end_sequence ();
1593
1594 emit_insn_before (seq, insn);
1595 delete_insn (insn);
1596 return;
1597 }
1598
1599 extract_insn (insn);
1600 insn_code = INSN_CODE (insn);
1601
1602 /* Handle a plus involving a virtual register by determining if the
1603 operands remain valid if they're modified in place. */
1604 poly_int64 delta;
1605 if (GET_CODE (SET_SRC (set)) == PLUS
1606 && recog_data.n_operands >= 3
1607 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1608 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1609 && poly_int_rtx_p (recog_data.operand[2], &delta)
1610 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1611 {
1612 offset += delta;
1613
1614 /* If the sum is zero, then replace with a plain move. */
1615 if (known_eq (offset, 0)
1616 && REG_P (SET_DEST (set))
1617 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1618 {
1619 start_sequence ();
1620 emit_move_insn (SET_DEST (set), new_rtx);
1621 seq = get_insns ();
1622 end_sequence ();
1623
1624 emit_insn_before (seq, insn);
1625 delete_insn (insn);
1626 return;
1627 }
1628
1629 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1630
1631 /* Using validate_change and apply_change_group here leaves
1632 recog_data in an invalid state. Since we know exactly what
1633 we want to check, do those two by hand. */
1634 if (safe_insn_predicate (insn_code, 1, new_rtx)
1635 && safe_insn_predicate (insn_code, 2, x))
1636 {
1637 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1638 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1639 any_change = true;
1640
1641 /* Fall through into the regular operand fixup loop in
1642 order to take care of operands other than 1 and 2. */
1643 }
1644 }
1645 }
1646 else
1647 {
1648 extract_insn (insn);
1649 insn_code = INSN_CODE (insn);
1650 }
1651
1652 /* In the general case, we expect virtual registers to appear only in
1653 operands, and then only as either bare registers or inside memories. */
1654 for (i = 0; i < recog_data.n_operands; ++i)
1655 {
1656 x = recog_data.operand[i];
1657 switch (GET_CODE (x))
1658 {
1659 case MEM:
1660 {
1661 rtx addr = XEXP (x, 0);
1662
1663 if (!instantiate_virtual_regs_in_rtx (&addr))
1664 continue;
1665
1666 start_sequence ();
1667 x = replace_equiv_address (x, addr, true);
1668 /* It may happen that the address with the virtual reg
1669 was valid (e.g. based on the virtual stack reg, which might
1670 be acceptable to the predicates with all offsets), whereas
1671 the address now isn't anymore, for instance when the address
1672 is still offsetted, but the base reg isn't virtual-stack-reg
1673 anymore. Below we would do a force_reg on the whole operand,
1674 but this insn might actually only accept memory. Hence,
1675 before doing that last resort, try to reload the address into
1676 a register, so this operand stays a MEM. */
1677 if (!safe_insn_predicate (insn_code, i, x))
1678 {
1679 addr = force_reg (GET_MODE (addr), addr);
1680 x = replace_equiv_address (x, addr, true);
1681 }
1682 seq = get_insns ();
1683 end_sequence ();
1684 if (seq)
1685 emit_insn_before (seq, insn);
1686 }
1687 break;
1688
1689 case REG:
1690 new_rtx = instantiate_new_reg (x, &offset);
1691 if (new_rtx == NULL)
1692 continue;
1693 if (known_eq (offset, 0))
1694 x = new_rtx;
1695 else
1696 {
1697 start_sequence ();
1698
1699 /* Careful, special mode predicates may have stuff in
1700 insn_data[insn_code].operand[i].mode that isn't useful
1701 to us for computing a new value. */
1702 /* ??? Recognize address_operand and/or "p" constraints
1703 to see if (plus new offset) is valid before we put
1704 this through expand_simple_binop. */
1705 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1706 gen_int_mode (offset, GET_MODE (x)),
1707 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1708 seq = get_insns ();
1709 end_sequence ();
1710 emit_insn_before (seq, insn);
1711 }
1712 break;
1713
1714 case SUBREG:
1715 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1716 if (new_rtx == NULL)
1717 continue;
1718 if (maybe_ne (offset, 0))
1719 {
1720 start_sequence ();
1721 new_rtx = expand_simple_binop
1722 (GET_MODE (new_rtx), PLUS, new_rtx,
1723 gen_int_mode (offset, GET_MODE (new_rtx)),
1724 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1725 seq = get_insns ();
1726 end_sequence ();
1727 emit_insn_before (seq, insn);
1728 }
1729 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1730 GET_MODE (new_rtx), SUBREG_BYTE (x));
1731 gcc_assert (x);
1732 break;
1733
1734 default:
1735 continue;
1736 }
1737
1738 /* At this point, X contains the new value for the operand.
1739 Validate the new value vs the insn predicate. Note that
1740 asm insns will have insn_code -1 here. */
1741 if (!safe_insn_predicate (insn_code, i, x))
1742 {
1743 start_sequence ();
1744 if (REG_P (x))
1745 {
1746 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1747 x = copy_to_reg (x);
1748 }
1749 else
1750 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1751 seq = get_insns ();
1752 end_sequence ();
1753 if (seq)
1754 emit_insn_before (seq, insn);
1755 }
1756
1757 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1758 any_change = true;
1759 }
1760
1761 if (any_change)
1762 {
1763 /* Propagate operand changes into the duplicates. */
1764 for (i = 0; i < recog_data.n_dups; ++i)
1765 *recog_data.dup_loc[i]
1766 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1767
1768 /* Force re-recognition of the instruction for validation. */
1769 INSN_CODE (insn) = -1;
1770 }
1771
1772 if (asm_noperands (PATTERN (insn)) >= 0)
1773 {
1774 if (!check_asm_operands (PATTERN (insn)))
1775 {
1776 error_for_asm (insn, "impossible constraint in %<asm%>");
1777 /* For asm goto, instead of fixing up all the edges
1778 just clear the template and clear input operands
1779 (asm goto doesn't have any output operands). */
1780 if (JUMP_P (insn))
1781 {
1782 rtx asm_op = extract_asm_operands (PATTERN (insn));
1783 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1784 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1785 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1786 }
1787 else
1788 delete_insn (insn);
1789 }
1790 }
1791 else
1792 {
1793 if (recog_memoized (insn) < 0)
1794 fatal_insn_not_found (insn);
1795 }
1796 }
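/* For illustration (a hypothetical example, not output from any one
   target): an add of a virtual register such as

	(set (reg 100) (plus (reg virtual-stack-vars) (const_int 16)))

   is rewritten by substituting the corresponding hard register and
   folding the two constant offsets together, e.g. into

	(set (reg 100) (plus (reg frame) (const_int -8)))

   and when the combined offset folds to zero the insn degenerates
   into a plain register move, which the special case near the top of
   instantiate_virtual_regs_in_insn emits directly.  */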
1797
1798 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1799 do any instantiation required. */
1800
1801 void
1802 instantiate_decl_rtl (rtx x)
1803 {
1804 rtx addr;
1805
1806 if (x == 0)
1807 return;
1808
1809 /* If this is a CONCAT, recurse for the pieces. */
1810 if (GET_CODE (x) == CONCAT)
1811 {
1812 instantiate_decl_rtl (XEXP (x, 0));
1813 instantiate_decl_rtl (XEXP (x, 1));
1814 return;
1815 }
1816
1817 /* If this is not a MEM, no need to do anything. Similarly if the
1818 address is a constant or a register that is not a virtual register. */
1819 if (!MEM_P (x))
1820 return;
1821
1822 addr = XEXP (x, 0);
1823 if (CONSTANT_P (addr)
1824 || (REG_P (addr)
1825 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1826 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1827 return;
1828
1829 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1830 }
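/* Example (illustrative only): a local variable whose DECL_RTL is

	(mem/c (plus (reg/f virtual-stack-vars) (const_int 8)))

   has its address rewritten in place to use the hard frame pointer
   plus the combined offset; the MEM itself, and hence the decl's RTL,
   stays the same object.  */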
1831
1832 /* Helper for instantiate_decls called via walk_tree: Process all decls
1833 in the given DECL_VALUE_EXPR. */
1834
1835 static tree
1836 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1837 {
1838 tree t = *tp;
1839 if (! EXPR_P (t))
1840 {
1841 *walk_subtrees = 0;
1842 if (DECL_P (t))
1843 {
1844 if (DECL_RTL_SET_P (t))
1845 instantiate_decl_rtl (DECL_RTL (t));
1846 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1847 && DECL_INCOMING_RTL (t))
1848 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1849 if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1850 && DECL_HAS_VALUE_EXPR_P (t))
1851 {
1852 tree v = DECL_VALUE_EXPR (t);
1853 walk_tree (&v, instantiate_expr, NULL, NULL);
1854 }
1855 }
1856 }
1857 return NULL;
1858 }
1859
1860 /* Subroutine of instantiate_decls: Process all decls in the given
1861 BLOCK node and all its subblocks. */
1862
1863 static void
1864 instantiate_decls_1 (tree let)
1865 {
1866 tree t;
1867
1868 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1869 {
1870 if (DECL_RTL_SET_P (t))
1871 instantiate_decl_rtl (DECL_RTL (t));
1872 if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1873 {
1874 tree v = DECL_VALUE_EXPR (t);
1875 walk_tree (&v, instantiate_expr, NULL, NULL);
1876 }
1877 }
1878
1879 /* Process all subblocks. */
1880 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1881 instantiate_decls_1 (t);
1882 }
1883
1884 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1885 all virtual registers in their DECL_RTL's. */
1886
1887 static void
1888 instantiate_decls (tree fndecl)
1889 {
1890 tree decl;
1891 unsigned ix;
1892
1893 /* Process all parameters of the function. */
1894 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1895 {
1896 instantiate_decl_rtl (DECL_RTL (decl));
1897 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1898 if (DECL_HAS_VALUE_EXPR_P (decl))
1899 {
1900 tree v = DECL_VALUE_EXPR (decl);
1901 walk_tree (&v, instantiate_expr, NULL, NULL);
1902 }
1903 }
1904
1905 if ((decl = DECL_RESULT (fndecl))
1906 && TREE_CODE (decl) == RESULT_DECL)
1907 {
1908 if (DECL_RTL_SET_P (decl))
1909 instantiate_decl_rtl (DECL_RTL (decl));
1910 if (DECL_HAS_VALUE_EXPR_P (decl))
1911 {
1912 tree v = DECL_VALUE_EXPR (decl);
1913 walk_tree (&v, instantiate_expr, NULL, NULL);
1914 }
1915 }
1916
1917 /* Process the saved static chain if it exists. */
1918 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1919 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1920 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1921
1922 /* Now process all variables defined in the function or its subblocks. */
1923 if (DECL_INITIAL (fndecl))
1924 instantiate_decls_1 (DECL_INITIAL (fndecl));
1925
1926 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1927 if (DECL_RTL_SET_P (decl))
1928 instantiate_decl_rtl (DECL_RTL (decl));
1929 vec_free (cfun->local_decls);
1930 }
1931
1932 /* Pass through the INSNS of function FNDECL and convert virtual register
1933 references to hard register references. */
1934
1935 static unsigned int
1936 instantiate_virtual_regs (void)
1937 {
1938 rtx_insn *insn;
1939
1940 /* Compute the offsets to use for this function. */
1941 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1942 var_offset = targetm.starting_frame_offset ();
1943 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1944 out_arg_offset = STACK_POINTER_OFFSET;
1945 #ifdef FRAME_POINTER_CFA_OFFSET
1946 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1947 #else
1948 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1949 #endif
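/* These offsets pair up with the virtual registers in
   instantiate_new_reg roughly as follows: virtual-incoming-args maps
   to the arg pointer plus IN_ARG_OFFSET, virtual-stack-vars to the
   frame pointer plus VAR_OFFSET, virtual-stack-dynamic to the stack
   pointer plus DYNAMIC_OFFSET, virtual-outgoing-args to the stack
   pointer plus OUT_ARG_OFFSET, and virtual-cfa to the CFA register
   plus CFA_OFFSET.  */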
1950
1951 /* Initialize recognition, indicating that volatile is OK. */
1952 init_recog ();
1953
1954 /* Scan through all the insns, instantiating every virtual register still
1955 present. */
1956 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1957 if (INSN_P (insn))
1958 {
1959 /* These patterns in the instruction stream can never be recognized.
1960 Fortunately, they shouldn't contain virtual registers either. */
1961 if (GET_CODE (PATTERN (insn)) == USE
1962 || GET_CODE (PATTERN (insn)) == CLOBBER
1963 || GET_CODE (PATTERN (insn)) == ASM_INPUT
1964 || DEBUG_MARKER_INSN_P (insn))
1965 continue;
1966 else if (DEBUG_BIND_INSN_P (insn))
1967 instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1968 else
1969 instantiate_virtual_regs_in_insn (insn);
1970
1971 if (insn->deleted ())
1972 continue;
1973
1974	  instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1975
1976 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1977 if (CALL_P (insn))
1978 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1979 }
1980
1981 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1982 instantiate_decls (current_function_decl);
1983
1984 targetm.instantiate_decls ();
1985
1986 /* Indicate that, from now on, assign_stack_local should use
1987 frame_pointer_rtx. */
1988 virtuals_instantiated = 1;
1989
1990 return 0;
1991 }
1992
1993 namespace {
1994
1995 const pass_data pass_data_instantiate_virtual_regs =
1996 {
1997 RTL_PASS, /* type */
1998 "vregs", /* name */
1999 OPTGROUP_NONE, /* optinfo_flags */
2000 TV_NONE, /* tv_id */
2001 0, /* properties_required */
2002 0, /* properties_provided */
2003 0, /* properties_destroyed */
2004 0, /* todo_flags_start */
2005 0, /* todo_flags_finish */
2006 };
2007
2008 class pass_instantiate_virtual_regs : public rtl_opt_pass
2009 {
2010 public:
2011   pass_instantiate_virtual_regs (gcc::context *ctxt)
2012 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2013 {}
2014
2015 /* opt_pass methods: */
2016   virtual unsigned int execute (function *)
2017 {
2018 return instantiate_virtual_regs ();
2019 }
2020
2021 }; // class pass_instantiate_virtual_regs
2022
2023 } // anon namespace
2024
2025 rtl_opt_pass *
2026 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2027 {
2028 return new pass_instantiate_virtual_regs (ctxt);
2029 }
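/* Sketch of typical use: the pass object created by this factory is
   scheduled in passes.def, e.g.

	NEXT_PASS (pass_instantiate_virtual_regs);

   so that it runs once per function, shortly after initial RTL
   expansion.  */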
2030
2031
2032 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2033 This means a type for which function calls must pass an address to the
2034 function or get an address back from the function.
2035 EXP may be a type node or an expression (whose type is tested). */
2036
2037 int
2038 aggregate_value_p (const_tree exp, const_tree fntype)
2039 {
2040 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2041 int i, regno, nregs;
2042 rtx reg;
2043
2044 if (fntype)
2045 switch (TREE_CODE (fntype))
2046 {
2047 case CALL_EXPR:
2048 {
2049 tree fndecl = get_callee_fndecl (fntype);
2050 if (fndecl)
2051 fntype = TREE_TYPE (fndecl);
2052 else if (CALL_EXPR_FN (fntype))
2053 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2054 else
2055 /* For internal functions, assume nothing needs to be
2056 returned in memory. */
2057 return 0;
2058 }
2059 break;
2060 case FUNCTION_DECL:
2061 fntype = TREE_TYPE (fntype);
2062 break;
2063 case FUNCTION_TYPE:
2064 case METHOD_TYPE:
2065 break;
2066 case IDENTIFIER_NODE:
2067 fntype = NULL_TREE;
2068 break;
2069 default:
2070 /* We don't expect other tree types here. */
2071 gcc_unreachable ();
2072 }
2073
2074 if (VOID_TYPE_P (type))
2075 return 0;
2076
2077   /* If a record should be passed the same as its first (and only) member,
2078 don't pass it as an aggregate. */
2079 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2080 return aggregate_value_p (first_field (type), fntype);
2081
2082 /* If the front end has decided that this needs to be passed by
2083 reference, do so. */
2084 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2085 && DECL_BY_REFERENCE (exp))
2086 return 1;
2087
2088 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2089 if (fntype && TREE_ADDRESSABLE (fntype))
2090 return 1;
2091
2092 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2093 and thus can't be returned in registers. */
2094 if (TREE_ADDRESSABLE (type))
2095 return 1;
2096
2097 if (TYPE_EMPTY_P (type))
2098 return 0;
2099
2100 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2101 return 1;
2102
2103 if (targetm.calls.return_in_memory (type, fntype))
2104 return 1;
2105
2106 /* Make sure we have suitable call-clobbered regs to return
2107 the value in; if not, we must return it in memory. */
2108 reg = hard_function_value (type, 0, fntype, 0);
2109
2110 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2111 it is OK. */
2112 if (!REG_P (reg))
2113 return 0;
2114
2115 regno = REGNO (reg);
2116 nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2117 for (i = 0; i < nregs; i++)
2118 if (! call_used_regs[regno + i])
2119 return 1;
2120
2121 return 0;
2122 }
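/* Worked example (target-dependent, for illustration): on a typical
   target whose return_in_memory hook is true for large records, a
   function returning "struct big { int a[16]; }" gets 1 here, while
   one returning "int", or a small struct that fits in call-clobbered
   return registers, gets 0; callers of the former must therefore pass
   a hidden address for the return value.  */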
2123
2124 /* Return true if we should assign DECL a pseudo register; false if it
2125 should live on the local stack. */
2126
2127 bool
2128 use_register_for_decl (const_tree decl)
2129 {
2130 if (TREE_CODE (decl) == SSA_NAME)
2131 {
2132 /* We often try to use the SSA_NAME, instead of its underlying
2133 decl, to get type information and guide decisions, to avoid
2134 differences of behavior between anonymous and named
2135 variables, but in this one case we have to go for the actual
2136 variable if there is one. The main reason is that, at least
2137 at -O0, we want to place user variables on the stack, but we
2138 don't mind using pseudos for anonymous or ignored temps.
2139 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2140 should go in pseudos, whereas their corresponding variables
2141 might have to go on the stack. So, disregarding the decl
2142 here would negatively impact debug info at -O0, enable
2143 coalescing between SSA_NAMEs that ought to get different
2144 stack/pseudo assignments, and get the incoming argument
2145 processing thoroughly confused by PARM_DECLs expected to live
2146 in stack slots but assigned to pseudos. */
2147 if (!SSA_NAME_VAR (decl))
2148 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2149 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2150
2151 decl = SSA_NAME_VAR (decl);
2152 }
2153
2154 /* Honor volatile. */
2155 if (TREE_SIDE_EFFECTS (decl))
2156 return false;
2157
2158 /* Honor addressability. */
2159 if (TREE_ADDRESSABLE (decl))
2160 return false;
2161
2162 /* RESULT_DECLs are a bit special in that they're assigned without
2163 regard to use_register_for_decl, but we generally only store in
2164 them. If we coalesce their SSA NAMEs, we'd better return a
2165 result that matches the assignment in expand_function_start. */
2166 if (TREE_CODE (decl) == RESULT_DECL)
2167 {
2168 /* If it's not an aggregate, we're going to use a REG or a
2169 PARALLEL containing a REG. */
2170 if (!aggregate_value_p (decl, current_function_decl))
2171 return true;
2172
2173 /* If expand_function_start determines the return value, we'll
2174 use MEM if it's not by reference. */
2175 if (cfun->returns_pcc_struct
2176 || (targetm.calls.struct_value_rtx
2177 (TREE_TYPE (current_function_decl), 1)))
2178 return DECL_BY_REFERENCE (decl);
2179
2180 /* Otherwise, we're taking an extra all.function_result_decl
2181 argument. It's set up in assign_parms_augmented_arg_list,
2182 under the (negated) conditions above, and then it's used to
2183	 set up the RESULT_DECL rtl in assign_parms, after looping
2184 over all parameters. Now, if the RESULT_DECL is not by
2185 reference, we'll use a MEM either way. */
2186 if (!DECL_BY_REFERENCE (decl))
2187 return false;
2188
2189 /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2190 the function_result_decl's assignment. Since it's a pointer,
2191 we can short-circuit a number of the tests below, and we must
2192	 duplicate them because we don't have the
2193 function_result_decl to test. */
2194 if (!targetm.calls.allocate_stack_slots_for_args ())
2195 return true;
2196 /* We don't set DECL_IGNORED_P for the function_result_decl. */
2197 if (optimize)
2198 return true;
2199 /* We don't set DECL_REGISTER for the function_result_decl. */
2200 return false;
2201 }
2202
2203   /* Decl is implicitly addressable by bound stores and loads
2204 if it is an aggregate holding bounds. */
2205 if (chkp_function_instrumented_p (current_function_decl)
2206 && TREE_TYPE (decl)
2207 && !BOUNDED_P (decl)
2208 && chkp_type_has_pointer (TREE_TYPE (decl)))
2209 return false;
2210
2211 /* Only register-like things go in registers. */
2212 if (DECL_MODE (decl) == BLKmode)
2213 return false;
2214
2215 /* If -ffloat-store specified, don't put explicit float variables
2216 into registers. */
2217 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2218 propagates values across these stores, and it probably shouldn't. */
2219 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2220 return false;
2221
2222 if (!targetm.calls.allocate_stack_slots_for_args ())
2223 return true;
2224
2225 /* If we're not interested in tracking debugging information for
2226 this decl, then we can certainly put it in a register. */
2227 if (DECL_IGNORED_P (decl))
2228 return true;
2229
2230 if (optimize)
2231 return true;
2232
2233 if (!DECL_REGISTER (decl))
2234 return false;
2235
2236 /* When not optimizing, disregard register keyword for types that
2237 could have methods, otherwise the methods won't be callable from
2238 the debugger. */
2239 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2240 return false;
2241
2242 return true;
2243 }
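/* Net effect, as a sketch: at -O0 a named user variable such as
   "int i;" normally reaches the trailing !DECL_REGISTER test and ends
   up on the stack for the sake of the debugger, whereas a compiler
   temporary with DECL_IGNORED_P set is given a pseudo even when not
   optimizing.  */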
2244
2245 /* Structures to communicate between the subroutines of assign_parms.
2246 The first holds data persistent across all parameters, the second
2247 is cleared out for each parameter. */
2248
2249 struct assign_parm_data_all
2250 {
2251 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2252 should become a job of the target or otherwise encapsulated. */
2253 CUMULATIVE_ARGS args_so_far_v;
2254 cumulative_args_t args_so_far;
2255 struct args_size stack_args_size;
2256 tree function_result_decl;
2257 tree orig_fnargs;
2258 rtx_insn *first_conversion_insn;
2259 rtx_insn *last_conversion_insn;
2260 HOST_WIDE_INT pretend_args_size;
2261 HOST_WIDE_INT extra_pretend_bytes;
2262 int reg_parm_stack_space;
2263 };
2264
2265 struct assign_parm_data_one
2266 {
2267 tree nominal_type;
2268 tree passed_type;
2269 rtx entry_parm;
2270 rtx stack_parm;
2271 machine_mode nominal_mode;
2272 machine_mode passed_mode;
2273 machine_mode promoted_mode;
2274 struct locate_and_pad_arg_data locate;
2275 int partial;
2276 BOOL_BITFIELD named_arg : 1;
2277 BOOL_BITFIELD passed_pointer : 1;
2278 BOOL_BITFIELD on_stack : 1;
2279 BOOL_BITFIELD loaded_in_reg : 1;
2280 };
2281
2282 struct bounds_parm_data
2283 {
2284 assign_parm_data_one parm_data;
2285 tree bounds_parm;
2286 tree ptr_parm;
2287 rtx ptr_entry;
2288 int bound_no;
2289 };
2290
2291 /* A subroutine of assign_parms. Initialize ALL. */
2292
2293 static void
2294 assign_parms_initialize_all (struct assign_parm_data_all *all)
2295 {
2296 tree fntype ATTRIBUTE_UNUSED;
2297
2298 memset (all, 0, sizeof (*all));
2299
2300 fntype = TREE_TYPE (current_function_decl);
2301
2302 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2303 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2304 #else
2305 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2306 current_function_decl, -1);
2307 #endif
2308 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2309
2310 #ifdef INCOMING_REG_PARM_STACK_SPACE
2311 all->reg_parm_stack_space
2312 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2313 #endif
2314 }
2315
2316 /* If ARGS contains entries with complex types, split each such entry
2317    into two entries of the component type.  The vector *ARGS is
2318    modified in place.  */
2319
2320 static void
2321 split_complex_args (vec<tree> *args)
2322 {
2323 unsigned i;
2324 tree p;
2325
2326 FOR_EACH_VEC_ELT (*args, i, p)
2327 {
2328 tree type = TREE_TYPE (p);
2329 if (TREE_CODE (type) == COMPLEX_TYPE
2330 && targetm.calls.split_complex_arg (type))
2331 {
2332 tree decl;
2333 tree subtype = TREE_TYPE (type);
2334 bool addressable = TREE_ADDRESSABLE (p);
2335
2336 /* Rewrite the PARM_DECL's type with its component. */
2337 p = copy_node (p);
2338 TREE_TYPE (p) = subtype;
2339 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2340 SET_DECL_MODE (p, VOIDmode);
2341 DECL_SIZE (p) = NULL;
2342 DECL_SIZE_UNIT (p) = NULL;
2343 /* If this arg must go in memory, put it in a pseudo here.
2344 We can't allow it to go in memory as per normal parms,
2345 because the usual place might not have the imag part
2346 adjacent to the real part. */
2347 DECL_ARTIFICIAL (p) = addressable;
2348 DECL_IGNORED_P (p) = addressable;
2349 TREE_ADDRESSABLE (p) = 0;
2350 layout_decl (p, 0);
2351 (*args)[i] = p;
2352
2353 /* Build a second synthetic decl. */
2354 decl = build_decl (EXPR_LOCATION (p),
2355 PARM_DECL, NULL_TREE, subtype);
2356 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2357 DECL_ARTIFICIAL (decl) = addressable;
2358 DECL_IGNORED_P (decl) = addressable;
2359 layout_decl (decl, 0);
2360 args->safe_insert (++i, decl);
2361 }
2362 }
2363 }
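/* Illustration: on a target whose split_complex_arg hook accepts
   COMPLEX_TYPE, a parameter declared as

	void f (_Complex double z);

   is rewritten here into two adjacent "double" PARM_DECLs: the
   original decl, retyped to the component type for the real part, and
   a synthetic nameless decl for the imaginary part inserted right
   after it.  */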
2364
2365 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2366    the hidden struct return argument, and (ABI willing) complex args.
2367 Return the new parameter list. */
2368
2369 static vec<tree>
2370 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2371 {
2372 tree fndecl = current_function_decl;
2373 tree fntype = TREE_TYPE (fndecl);
2374 vec<tree> fnargs = vNULL;
2375 tree arg;
2376
2377 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2378 fnargs.safe_push (arg);
2379
2380 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2381
2382 /* If struct value address is treated as the first argument, make it so. */
2383 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2384 && ! cfun->returns_pcc_struct
2385 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2386 {
2387 tree type = build_pointer_type (TREE_TYPE (fntype));
2388 tree decl;
2389
2390 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2391 PARM_DECL, get_identifier (".result_ptr"), type);
2392 DECL_ARG_TYPE (decl) = type;
2393 DECL_ARTIFICIAL (decl) = 1;
2394 DECL_NAMELESS (decl) = 1;
2395 TREE_CONSTANT (decl) = 1;
2396 /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2397 changes, the end of the RESULT_DECL handling block in
2398 use_register_for_decl must be adjusted to match. */
2399
2400 DECL_CHAIN (decl) = all->orig_fnargs;
2401 all->orig_fnargs = decl;
2402 fnargs.safe_insert (0, decl);
2403
2404 all->function_result_decl = decl;
2405
2406       /* If the function is instrumented, then the bounds of the
2407	  passed structure address are passed as the second argument.  */
2408 if (chkp_function_instrumented_p (fndecl))
2409 {
2410 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2411 PARM_DECL, get_identifier (".result_bnd"),
2412 pointer_bounds_type_node);
2413 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2414 DECL_ARTIFICIAL (decl) = 1;
2415 DECL_NAMELESS (decl) = 1;
2416 TREE_CONSTANT (decl) = 1;
2417
2418 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2419 DECL_CHAIN (all->orig_fnargs) = decl;
2420 fnargs.safe_insert (1, decl);
2421 }
2422 }
2423
2424 /* If the target wants to split complex arguments into scalars, do so. */
2425 if (targetm.calls.split_complex_arg)
2426 split_complex_args (&fnargs);
2427
2428 return fnargs;
2429 }
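/* Sketch of the effect: for a function that returns its value in
   memory on a target with no dedicated struct-value register,

	struct big f (void);

   is handled as if it were

	void f (struct big *.result_ptr);

   with the artificial ".result_ptr" PARM_DECL prepended to the list
   (followed, for chkp-instrumented functions, by ".result_bnd").  */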
2430
2431 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2432 data for the parameter. Incorporate ABI specifics such as pass-by-
2433 reference and type promotion. */
2434
2435 static void
2436 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2437 struct assign_parm_data_one *data)
2438 {
2439 tree nominal_type, passed_type;
2440 machine_mode nominal_mode, passed_mode, promoted_mode;
2441 int unsignedp;
2442
2443 memset (data, 0, sizeof (*data));
2444
2445 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2446 if (!cfun->stdarg)
2447 data->named_arg = 1; /* No variadic parms. */
2448 else if (DECL_CHAIN (parm))
2449 data->named_arg = 1; /* Not the last non-variadic parm. */
2450 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2451 data->named_arg = 1; /* Only variadic ones are unnamed. */
2452 else
2453 data->named_arg = 0; /* Treat as variadic. */
2454
2455 nominal_type = TREE_TYPE (parm);
2456 passed_type = DECL_ARG_TYPE (parm);
2457
2458 /* Look out for errors propagating this far. Also, if the parameter's
2459 type is void then its value doesn't matter. */
2460 if (TREE_TYPE (parm) == error_mark_node
2461 /* This can happen after weird syntax errors
2462 or if an enum type is defined among the parms. */
2463 || TREE_CODE (parm) != PARM_DECL
2464 || passed_type == NULL
2465 || VOID_TYPE_P (nominal_type))
2466 {
2467 nominal_type = passed_type = void_type_node;
2468 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2469 goto egress;
2470 }
2471
2472 /* Find mode of arg as it is passed, and mode of arg as it should be
2473 during execution of this function. */
2474 passed_mode = TYPE_MODE (passed_type);
2475 nominal_mode = TYPE_MODE (nominal_type);
2476
2477 /* If the parm is to be passed as a transparent union or record, use the
2478 type of the first field for the tests below. We have already verified
2479 that the modes are the same. */
2480 if ((TREE_CODE (passed_type) == UNION_TYPE
2481 || TREE_CODE (passed_type) == RECORD_TYPE)
2482 && TYPE_TRANSPARENT_AGGR (passed_type))
2483 passed_type = TREE_TYPE (first_field (passed_type));
2484
2485 /* See if this arg was passed by invisible reference. */
2486 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2487 passed_type, data->named_arg))
2488 {
2489 passed_type = nominal_type = build_pointer_type (passed_type);
2490 data->passed_pointer = true;
2491 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2492 }
2493
2494 /* Find mode as it is passed by the ABI. */
2495 unsignedp = TYPE_UNSIGNED (passed_type);
2496 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2497 TREE_TYPE (current_function_decl), 0);
2498
2499 egress:
2500 data->nominal_type = nominal_type;
2501 data->passed_type = passed_type;
2502 data->nominal_mode = nominal_mode;
2503 data->passed_mode = passed_mode;
2504 data->promoted_mode = promoted_mode;
2505 }
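/* Example (ABI-dependent): for "void f (short s)" on a target whose
   promote_function_mode widens small integers, we would see
   passed_mode == nominal_mode == HImode but promoted_mode == SImode;
   the value then arrives in an SImode register and is narrowed back
   when the parm is set up.  */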
2506
2507 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2508
2509 static void
2510 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2511 struct assign_parm_data_one *data, bool no_rtl)
2512 {
2513 int varargs_pretend_bytes = 0;
2514
2515 targetm.calls.setup_incoming_varargs (all->args_so_far,
2516 data->promoted_mode,
2517 data->passed_type,
2518 &varargs_pretend_bytes, no_rtl);
2519
2520 /* If the back-end has requested extra stack space, record how much is
2521 needed. Do not change pretend_args_size otherwise since it may be
2522 nonzero from an earlier partial argument. */
2523 if (varargs_pretend_bytes > 0)
2524 all->pretend_args_size = varargs_pretend_bytes;
2525 }
2526
2527 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2528 the incoming location of the current parameter. */
2529
2530 static void
2531 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2532 struct assign_parm_data_one *data)
2533 {
2534 HOST_WIDE_INT pretend_bytes = 0;
2535 rtx entry_parm;
2536 bool in_regs;
2537
2538 if (data->promoted_mode == VOIDmode)
2539 {
2540 data->entry_parm = data->stack_parm = const0_rtx;
2541 return;
2542 }
2543
2544 targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2545 data->passed_type);
2546
2547 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2548 data->promoted_mode,
2549 data->passed_type,
2550 data->named_arg);
2551
2552 if (entry_parm == 0)
2553 data->promoted_mode = data->passed_mode;
2554
2555 /* Determine parm's home in the stack, in case it arrives in the stack
2556 or we should pretend it did. Compute the stack position and rtx where
2557 the argument arrives and its size.
2558
2559 There is one complexity here: If this was a parameter that would
2560 have been passed in registers, but wasn't only because it is
2561 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2562 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2563 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2564 as it was the previous time. */
2565 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2566 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2567 in_regs = true;
2568 #endif
2569 if (!in_regs && !data->named_arg)
2570 {
2571 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2572 {
2573 rtx tem;
2574 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2575 data->promoted_mode,
2576 data->passed_type, true);
2577 in_regs = tem != NULL;
2578 }
2579 }
2580
2581 /* If this parameter was passed both in registers and in the stack, use
2582 the copy on the stack. */
2583 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2584 data->passed_type))
2585 entry_parm = 0;
2586
2587 if (entry_parm)
2588 {
2589 int partial;
2590
2591 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2592 data->promoted_mode,
2593 data->passed_type,
2594 data->named_arg);
2595 data->partial = partial;
2596
2597 /* The caller might already have allocated stack space for the
2598 register parameters. */
2599 if (partial != 0 && all->reg_parm_stack_space == 0)
2600 {
2601 /* Part of this argument is passed in registers and part
2602 is passed on the stack. Ask the prologue code to extend
2603 the stack part so that we can recreate the full value.
2604
2605 PRETEND_BYTES is the size of the registers we need to store.
2606 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2607 stack space that the prologue should allocate.
2608
2609 Internally, gcc assumes that the argument pointer is aligned
2610 to STACK_BOUNDARY bits. This is used both for alignment
2611 optimizations (see init_emit) and to locate arguments that are
2612 aligned to more than PARM_BOUNDARY bits. We must preserve this
2613 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2614 a stack boundary. */
2615
2616 /* We assume at most one partial arg, and it must be the first
2617 argument on the stack. */
2618 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2619
2620 pretend_bytes = partial;
2621 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2622
2623 /* We want to align relative to the actual stack pointer, so
2624 don't include this in the stack size until later. */
2625 all->extra_pretend_bytes = all->pretend_args_size;
2626 }
2627 }
2628
2629 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2630 all->reg_parm_stack_space,
2631 entry_parm ? data->partial : 0, current_function_decl,
2632 &all->stack_args_size, &data->locate);
2633
2634 /* Update parm_stack_boundary if this parameter is passed in the
2635 stack. */
2636 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2637 crtl->parm_stack_boundary = data->locate.boundary;
2638
2639 /* Adjust offsets to include the pretend args. */
2640 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2641 data->locate.slot_offset.constant += pretend_bytes;
2642 data->locate.offset.constant += pretend_bytes;
2643
2644 data->entry_parm = entry_parm;
2645 }
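/* Arithmetic sketch for the partial-argument case above: if PARTIAL
   says 4 bytes arrived in registers and STACK_BYTES is 16, then
   CEIL_ROUND (4, 16) sets pretend_args_size to 16, i.e. the prologue
   reserves a whole aligned slot so the register part can be stored
   contiguously with the stack part.  */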
2646
2647 /* A subroutine of assign_parms. If there is actually space on the stack
2648 for this parm, count it in stack_args_size and return true. */
2649
2650 static bool
2651 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2652 struct assign_parm_data_one *data)
2653 {
2654 /* Bounds are never passed on the stack to keep compatibility
2655 with not instrumented code. */
2656 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2657 return false;
2658 /* Trivially true if we've no incoming register. */
2659 else if (data->entry_parm == NULL)
2660 ;
2661 /* Also true if we're partially in registers and partially not,
2662 since we've arranged to drop the entire argument on the stack. */
2663 else if (data->partial != 0)
2664 ;
2665 /* Also true if the target says that it's passed in both registers
2666 and on the stack. */
2667 else if (GET_CODE (data->entry_parm) == PARALLEL
2668 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2669 ;
2670 /* Also true if the target says that there's stack allocated for
2671 all register parameters. */
2672 else if (all->reg_parm_stack_space > 0)
2673 ;
2674 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2675 else
2676 return false;
2677
2678 all->stack_args_size.constant += data->locate.size.constant;
2679 if (data->locate.size.var)
2680 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2681
2682 return true;
2683 }
2684
2685 /* A subroutine of assign_parms. Given that this parameter is allocated
2686 stack space by the ABI, find it. */
2687
2688 static void
2689 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2690 {
2691 rtx offset_rtx, stack_parm;
2692 unsigned int align, boundary;
2693
2694 /* If we're passing this arg using a reg, make its stack home the
2695 aligned stack slot. */
2696 if (data->entry_parm)
2697 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2698 else
2699 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2700
2701 stack_parm = crtl->args.internal_arg_pointer;
2702 if (offset_rtx != const0_rtx)
2703 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2704 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2705
2706 if (!data->passed_pointer)
2707 {
2708 set_mem_attributes (stack_parm, parm, 1);
2709 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2710 while promoted mode's size is needed. */
2711 if (data->promoted_mode != BLKmode
2712 && data->promoted_mode != DECL_MODE (parm))
2713 {
2714 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2715 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2716 {
2717 poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2718 data->promoted_mode);
2719 if (maybe_ne (offset, 0))
2720 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2721 }
2722 }
2723 }
2724
2725 boundary = data->locate.boundary;
2726 align = BITS_PER_UNIT;
2727
2728 /* If we're padding upward, we know that the alignment of the slot
2729 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2730 intentionally forcing upward padding. Otherwise we have to come
2731 up with a guess at the alignment based on OFFSET_RTX. */
2732 poly_int64 offset;
2733 if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm)
2734 align = boundary;
2735 else if (poly_int_rtx_p (offset_rtx, &offset))
2736 {
2737 align = least_bit_hwi (boundary);
2738 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2739 if (offset_align != 0)
2740 align = MIN (align, offset_align);
2741 }
2742 set_mem_align (stack_parm, align);
2743
2744 if (data->entry_parm)
2745 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2746
2747 data->stack_parm = stack_parm;
2748 }
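/* Numeric example for the alignment guess above: with a boundary of
   64 bits and a constant OFFSET_RTX of 12, least_bit_hwi (64) yields
   64 while known_alignment (12) * BITS_PER_UNIT yields 32 (4 bytes),
   so the MEM is conservatively marked as only 32-bit aligned.  */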
2749
2750 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2751 always valid and contiguous. */
2752
2753 static void
2754 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2755 {
2756 rtx entry_parm = data->entry_parm;
2757 rtx stack_parm = data->stack_parm;
2758
2759 /* If this parm was passed part in regs and part in memory, pretend it
2760 arrived entirely in memory by pushing the register-part onto the stack.
2761 In the special case of a DImode or DFmode that is split, we could put
2762 it together in a pseudoreg directly, but for now that's not worth
2763 bothering with. */
2764 if (data->partial != 0)
2765 {
2766 /* Handle calls that pass values in multiple non-contiguous
2767 locations. The Irix 6 ABI has examples of this. */
2768 if (GET_CODE (entry_parm) == PARALLEL)
2769 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2770 data->passed_type,
2771 int_size_in_bytes (data->passed_type));
2772 else
2773 {
2774 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2775 move_block_from_reg (REGNO (entry_parm),
2776 validize_mem (copy_rtx (stack_parm)),
2777 data->partial / UNITS_PER_WORD);
2778 }
2779
2780 entry_parm = stack_parm;
2781 }
2782
2783 /* If we didn't decide this parm came in a register, by default it came
2784 on the stack. */
2785 else if (entry_parm == NULL)
2786 entry_parm = stack_parm;
2787
2788 /* When an argument is passed in multiple locations, we can't make use
2789 of this information, but we can save some copying if the whole argument
2790 is passed in a single register. */
2791 else if (GET_CODE (entry_parm) == PARALLEL
2792 && data->nominal_mode != BLKmode
2793 && data->passed_mode != BLKmode)
2794 {
2795 size_t i, len = XVECLEN (entry_parm, 0);
2796
2797 for (i = 0; i < len; i++)
2798 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2799 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2800 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2801 == data->passed_mode)
2802 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2803 {
2804 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2805 break;
2806 }
2807 }
2808
2809 data->entry_parm = entry_parm;
2810 }
2811
2812 /* A subroutine of assign_parms. Reconstitute any values which were
2813 passed in multiple registers and would fit in a single register. */
2814
2815 static void
2816 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2817 {
2818 rtx entry_parm = data->entry_parm;
2819
2820 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2821 This can be done with register operations rather than on the
2822 stack, even if we will store the reconstituted parameter on the
2823 stack later. */
2824 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2825 {
2826 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2827 emit_group_store (parmreg, entry_parm, data->passed_type,
2828 GET_MODE_SIZE (GET_MODE (entry_parm)));
2829 entry_parm = parmreg;
2830 }
2831
2832 data->entry_parm = entry_parm;
2833 }
2834
2835 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2836 always valid and properly aligned. */
2837
2838 static void
2839 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2840 {
2841 rtx stack_parm = data->stack_parm;
2842
2843 /* If we can't trust the parm stack slot to be aligned enough for its
2844 ultimate type, don't use that slot after entry. We'll make another
2845 stack slot, if we need one. */
2846 if (stack_parm
2847 && ((STRICT_ALIGNMENT
2848 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2849 || (data->nominal_type
2850 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2851 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2852 stack_parm = NULL;
2853
2854 /* If parm was passed in memory, and we need to convert it on entry,
2855 don't store it back in that same slot. */
2856 else if (data->entry_parm == stack_parm
2857 && data->nominal_mode != BLKmode
2858 && data->nominal_mode != data->passed_mode)
2859 stack_parm = NULL;
2860
2861 /* If stack protection is in effect for this function, don't leave any
2862 pointers in their passed stack slots. */
2863 else if (crtl->stack_protect_guard
2864 && (flag_stack_protect == 2
2865 || data->passed_pointer
2866 || POINTER_TYPE_P (data->nominal_type)))
2867 stack_parm = NULL;
2868
2869 data->stack_parm = stack_parm;
2870 }
2871
2872 /* A subroutine of assign_parms. Return true if the current parameter
2873 should be stored as a BLKmode in the current frame. */
2874
2875 static bool
2876 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2877 {
2878 if (data->nominal_mode == BLKmode)
2879 return true;
2880 if (GET_MODE (data->entry_parm) == BLKmode)
2881 return true;
2882
2883 #ifdef BLOCK_REG_PADDING
2884 /* Only assign_parm_setup_block knows how to deal with register arguments
2885 that are padded at the least significant end. */
2886 if (REG_P (data->entry_parm)
2887 && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD)
2888 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2889 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2890 return true;
2891 #endif
2892
2893 return false;
2894 }
2895
2896 /* A subroutine of assign_parms. Arrange for the parameter to be
2897 present and valid in DATA->STACK_RTL. */
2898
2899 static void
2900 assign_parm_setup_block (struct assign_parm_data_all *all,
2901 tree parm, struct assign_parm_data_one *data)
2902 {
2903 rtx entry_parm = data->entry_parm;
2904 rtx stack_parm = data->stack_parm;
2905 rtx target_reg = NULL_RTX;
2906 bool in_conversion_seq = false;
2907 HOST_WIDE_INT size;
2908 HOST_WIDE_INT size_stored;
2909
2910 if (GET_CODE (entry_parm) == PARALLEL)
2911 entry_parm = emit_group_move_into_temps (entry_parm);
2912
2913 /* If we want the parameter in a pseudo, don't use a stack slot. */
2914 if (is_gimple_reg (parm) && use_register_for_decl (parm))
2915 {
2916 tree def = ssa_default_def (cfun, parm);
2917 gcc_assert (def);
2918 machine_mode mode = promote_ssa_mode (def, NULL);
2919 rtx reg = gen_reg_rtx (mode);
2920 if (GET_CODE (reg) != CONCAT)
2921 stack_parm = reg;
2922 else
2923 {
2924 target_reg = reg;
2925 /* Avoid allocating a stack slot, if there isn't one
2926 preallocated by the ABI. It might seem like we should
2927 always prefer a pseudo, but converting between
2928 floating-point and integer modes goes through the stack
2929 on various machines, so it's better to use the reserved
2930 stack slot than to risk wasting it and allocating more
2931 for the conversion. */
2932 if (stack_parm == NULL_RTX)
2933 {
2934 int save = generating_concat_p;
2935 generating_concat_p = 0;
2936 stack_parm = gen_reg_rtx (mode);
2937 generating_concat_p = save;
2938 }
2939 }
2940 data->stack_parm = NULL;
2941 }
2942
2943 size = int_size_in_bytes (data->passed_type);
2944 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2945 if (stack_parm == 0)
2946 {
2947 SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
2948 stack_parm = assign_stack_local (BLKmode, size_stored,
2949 DECL_ALIGN (parm));
2950 if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2951 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2952 set_mem_attributes (stack_parm, parm, 1);
2953 }
2954
2955 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2956 calls that pass values in multiple non-contiguous locations. */
2957 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2958 {
2959 rtx mem;
2960
2961 /* Note that we will be storing an integral number of words.
2962 So we have to be careful to ensure that we allocate an
2963 integral number of words. We do this above when we call
2964 assign_stack_local if space was not allocated in the argument
2965 list. If it was, this will not work if PARM_BOUNDARY is not
2966 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2967 if it becomes a problem. Exception is when BLKmode arrives
2968 with arguments not conforming to word_mode. */
2969
2970 if (data->stack_parm == 0)
2971 ;
2972 else if (GET_CODE (entry_parm) == PARALLEL)
2973 ;
2974 else
2975 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2976
2977 mem = validize_mem (copy_rtx (stack_parm));
2978
2979 /* Handle values in multiple non-contiguous locations. */
2980 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2981 emit_group_store (mem, entry_parm, data->passed_type, size);
2982 else if (GET_CODE (entry_parm) == PARALLEL)
2983 {
2984 push_to_sequence2 (all->first_conversion_insn,
2985 all->last_conversion_insn);
2986 emit_group_store (mem, entry_parm, data->passed_type, size);
2987 all->first_conversion_insn = get_insns ();
2988 all->last_conversion_insn = get_last_insn ();
2989 end_sequence ();
2990 in_conversion_seq = true;
2991 }
2992
2993 else if (size == 0)
2994 ;
2995
2996 /* If SIZE is that of a mode no bigger than a word, just use
2997 that mode's store operation. */
2998 else if (size <= UNITS_PER_WORD)
2999 {
3000 unsigned int bits = size * BITS_PER_UNIT;
3001 machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
3002
3003 if (mode != BLKmode
3004 #ifdef BLOCK_REG_PADDING
3005 && (size == UNITS_PER_WORD
3006 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3007 != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
3008 #endif
3009 )
3010 {
3011 rtx reg;
3012
3013 /* We are really truncating a word_mode value containing
3014 SIZE bytes into a value of mode MODE. If such an
3015 operation requires no actual instructions, we can refer
3016 to the value directly in mode MODE, otherwise we must
3017 start with the register in word_mode and explicitly
3018 convert it. */
3019 if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
3020 BITS_PER_WORD))
3021 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3022 else
3023 {
3024 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3025 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3026 }
3027 emit_move_insn (change_address (mem, mode, 0), reg);
3028 }
3029
3030 #ifdef BLOCK_REG_PADDING
3031 /* Storing the register in memory as a full word, as
3032 move_block_from_reg below would do, and then using the
3033 MEM in a smaller mode, has the effect of shifting right
3034 if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3035 shifting must be explicit. */
3036 else if (!MEM_P (mem))
3037 {
3038 rtx x;
3039
3040 /* If the assert below fails, we should have taken the
3041 mode != BLKmode path above, unless we have downward
3042 padding of smaller-than-word arguments on a machine
3043 with little-endian bytes, which would likely require
3044 additional changes to work correctly. */
3045 gcc_checking_assert (BYTES_BIG_ENDIAN
3046 && (BLOCK_REG_PADDING (mode,
3047 data->passed_type, 1)
3048 == PAD_UPWARD));
3049
3050 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3051
3052 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3053 x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3054 NULL_RTX, 1);
3055 x = force_reg (word_mode, x);
3056 x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3057
3058 emit_move_insn (mem, x);
3059 }
3060 #endif
3061
3062 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3063 machine must be aligned to the left before storing
3064 to memory. Note that the previous test doesn't
3065 handle all cases (e.g. SIZE == 3). */
3066 else if (size != UNITS_PER_WORD
3067 #ifdef BLOCK_REG_PADDING
3068 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3069 == PAD_DOWNWARD)
3070 #else
3071 && BYTES_BIG_ENDIAN
3072 #endif
3073 )
3074 {
3075 rtx tem, x;
3076 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3077 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3078
3079 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3080 tem = change_address (mem, word_mode, 0);
3081 emit_move_insn (tem, x);
3082 }
3083 else
3084 move_block_from_reg (REGNO (entry_parm), mem,
3085 size_stored / UNITS_PER_WORD);
3086 }
3087 else if (!MEM_P (mem))
3088 {
3089 gcc_checking_assert (size > UNITS_PER_WORD);
3090 #ifdef BLOCK_REG_PADDING
3091 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3092 data->passed_type, 0)
3093 == PAD_UPWARD);
3094 #endif
3095 emit_move_insn (mem, entry_parm);
3096 }
3097 else
3098 move_block_from_reg (REGNO (entry_parm), mem,
3099 size_stored / UNITS_PER_WORD);
3100 }
3101 else if (data->stack_parm == 0)
3102 {
3103 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3104 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3105 BLOCK_OP_NORMAL);
3106 all->first_conversion_insn = get_insns ();
3107 all->last_conversion_insn = get_last_insn ();
3108 end_sequence ();
3109 in_conversion_seq = true;
3110 }
3111
3112 if (target_reg)
3113 {
3114 if (!in_conversion_seq)
3115 emit_move_insn (target_reg, stack_parm);
3116 else
3117 {
3118 push_to_sequence2 (all->first_conversion_insn,
3119 all->last_conversion_insn);
3120 emit_move_insn (target_reg, stack_parm);
3121 all->first_conversion_insn = get_insns ();
3122 all->last_conversion_insn = get_last_insn ();
3123 end_sequence ();
3124 }
3125 stack_parm = target_reg;
3126 }
3127
3128 data->stack_parm = stack_parm;
3129 set_parm_rtl (parm, stack_parm);
3130 }
3131
3132 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3133 parameter. Get it there. Perform all ABI specified conversions. */
3134
3135 static void
3136 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3137 struct assign_parm_data_one *data)
3138 {
3139 rtx parmreg, validated_mem;
3140 rtx equiv_stack_parm;
3141 machine_mode promoted_nominal_mode;
3142 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3143 bool did_conversion = false;
3144 bool need_conversion, moved;
3145 rtx rtl;
3146
3147 /* Store the parm in a pseudoregister during the function, but we may
3148 need to do it in a wider mode. Using 2 here makes the result
3149 consistent with promote_decl_mode and thus expand_expr_real_1. */
3150 promoted_nominal_mode
3151 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3152 TREE_TYPE (current_function_decl), 2);
3153
3154 parmreg = gen_reg_rtx (promoted_nominal_mode);
3155 if (!DECL_ARTIFICIAL (parm))
3156 mark_user_reg (parmreg);
3157
3158 /* If this was an item that we received a pointer to,
3159 set rtl appropriately. */
3160 if (data->passed_pointer)
3161 {
3162 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3163 set_mem_attributes (rtl, parm, 1);
3164 }
3165 else
3166 rtl = parmreg;
3167
3168 assign_parm_remove_parallels (data);
3169
3170 /* Copy the value into the register, thus bridging between
3171 assign_parm_find_data_types and expand_expr_real_1. */
3172
3173 equiv_stack_parm = data->stack_parm;
3174 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3175
3176 need_conversion = (data->nominal_mode != data->passed_mode
3177 || promoted_nominal_mode != data->promoted_mode);
3178 moved = false;
3179
3180 if (need_conversion
3181 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3182 && data->nominal_mode == data->passed_mode
3183 && data->nominal_mode == GET_MODE (data->entry_parm))
3184 {
3185 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3186 mode, by the caller. We now have to convert it to
3187 NOMINAL_MODE, if different. However, PARMREG may be in
3188 a different mode than NOMINAL_MODE if it is being stored
3189 promoted.
3190
3191 If ENTRY_PARM is a hard register, it might be in a register
3192 not valid for operating in its mode (e.g., an odd-numbered
3193 register for a DFmode). In that case, moves are the only
3194 thing valid, so we can't do a convert from there. This
3195	 occurs when the calling sequence allows such misaligned
3196 usages.
3197
3198 In addition, the conversion may involve a call, which could
3199 clobber parameters which haven't been copied to pseudo
3200 registers yet.
3201
3202 First, we try to emit an insn which performs the necessary
3203 conversion. We verify that this insn does not clobber any
3204 hard registers. */
3205
3206 enum insn_code icode;
3207 rtx op0, op1;
3208
3209 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3210 unsignedp);
3211
3212 op0 = parmreg;
3213 op1 = validated_mem;
3214 if (icode != CODE_FOR_nothing
3215 && insn_operand_matches (icode, 0, op0)
3216 && insn_operand_matches (icode, 1, op1))
3217 {
3218 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3219 rtx_insn *insn, *insns;
3220 rtx t = op1;
3221 HARD_REG_SET hardregs;
3222
3223 start_sequence ();
3224 /* If op1 is a hard register that is likely spilled, first
3225 force it into a pseudo, otherwise combiner might extend
3226 its lifetime too much. */
3227 if (GET_CODE (t) == SUBREG)
3228 t = SUBREG_REG (t);
3229 if (REG_P (t)
3230 && HARD_REGISTER_P (t)
3231 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3232 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3233 {
3234 t = gen_reg_rtx (GET_MODE (op1));
3235 emit_move_insn (t, op1);
3236 }
3237 else
3238 t = op1;
3239 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3240 data->passed_mode, unsignedp);
3241 emit_insn (pat);
3242 insns = get_insns ();
3243
3244 moved = true;
3245 CLEAR_HARD_REG_SET (hardregs);
3246 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3247 {
3248 if (INSN_P (insn))
3249 note_stores (PATTERN (insn), record_hard_reg_sets,
3250 &hardregs);
3251 if (!hard_reg_set_empty_p (hardregs))
3252 moved = false;
3253 }
3254
3255 end_sequence ();
3256
3257 if (moved)
3258 {
3259 emit_insn (insns);
3260 if (equiv_stack_parm != NULL_RTX)
3261 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3262 equiv_stack_parm);
3263 }
3264 }
3265 }
3266
3267 if (moved)
3268 /* Nothing to do. */
3269 ;
3270 else if (need_conversion)
3271 {
3272 /* We did not have an insn to convert directly, or the sequence
3273 generated appeared unsafe. We must first copy the parm to a
3274 pseudo reg, and save the conversion until after all
3275 parameters have been moved. */
3276
3277 int save_tree_used;
3278 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3279
3280 emit_move_insn (tempreg, validated_mem);
3281
3282 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3283 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3284
3285 if (partial_subreg_p (tempreg)
3286 && GET_MODE (tempreg) == data->nominal_mode
3287 && REG_P (SUBREG_REG (tempreg))
3288 && data->nominal_mode == data->passed_mode
3289 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3290 {
3291 /* The argument is already sign/zero extended, so note it
3292 into the subreg. */
3293 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3294 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3295 }
3296
3297 /* TREE_USED gets set erroneously during expand_assignment. */
3298 save_tree_used = TREE_USED (parm);
3299 SET_DECL_RTL (parm, rtl);
3300 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3301 SET_DECL_RTL (parm, NULL_RTX);
3302 TREE_USED (parm) = save_tree_used;
3303 all->first_conversion_insn = get_insns ();
3304 all->last_conversion_insn = get_last_insn ();
3305 end_sequence ();
3306
3307 did_conversion = true;
3308 }
3309 else
3310 emit_move_insn (parmreg, validated_mem);
3311
3312 /* If we were passed a pointer but the actual value can safely live
3313 in a register, retrieve it and use it directly. */
3314 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3315 {
3316 /* We can't use nominal_mode, because it will have been set to
3317 Pmode above. We must use the actual mode of the parm. */
3318 if (use_register_for_decl (parm))
3319 {
3320 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3321 mark_user_reg (parmreg);
3322 }
3323 else
3324 {
3325 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3326 TYPE_MODE (TREE_TYPE (parm)),
3327 TYPE_ALIGN (TREE_TYPE (parm)));
3328 parmreg
3329 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3330 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3331 align);
3332 set_mem_attributes (parmreg, parm, 1);
3333 }
3334
3335 /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3336 the debug info in case it is not legitimate. */
3337 if (GET_MODE (parmreg) != GET_MODE (rtl))
3338 {
3339 rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3340 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3341
3342 push_to_sequence2 (all->first_conversion_insn,
3343 all->last_conversion_insn);
3344 emit_move_insn (tempreg, rtl);
3345 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3346 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3347 tempreg);
3348 all->first_conversion_insn = get_insns ();
3349 all->last_conversion_insn = get_last_insn ();
3350 end_sequence ();
3351
3352 did_conversion = true;
3353 }
3354 else
3355 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3356
3357 rtl = parmreg;
3358
3359 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3360 now the parm. */
3361 data->stack_parm = NULL;
3362 }
3363
3364 set_parm_rtl (parm, rtl);
3365
3366 /* Mark the register as eliminable if we did no conversion and it was
3367 copied from memory at a fixed offset, and the arg pointer was not
3368 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3369 offset formed an invalid address, such memory-equivalences as we
3370 make here would screw up life analysis for it. */
3371 if (data->nominal_mode == data->passed_mode
3372 && !did_conversion
3373 && data->stack_parm != 0
3374 && MEM_P (data->stack_parm)
3375 && data->locate.offset.var == 0
3376 && reg_mentioned_p (virtual_incoming_args_rtx,
3377 XEXP (data->stack_parm, 0)))
3378 {
3379 rtx_insn *linsn = get_last_insn ();
3380 rtx_insn *sinsn;
3381 rtx set;
3382
3383 /* Mark complex types separately. */
3384 if (GET_CODE (parmreg) == CONCAT)
3385 {
3386 scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3387 int regnor = REGNO (XEXP (parmreg, 0));
3388 int regnoi = REGNO (XEXP (parmreg, 1));
3389 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3390 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3391 GET_MODE_SIZE (submode));
3392
3393 /* Scan backwards for the set of the real and
3394 imaginary parts. */
3395 for (sinsn = linsn; sinsn != 0;
3396 sinsn = prev_nonnote_insn (sinsn))
3397 {
3398 set = single_set (sinsn);
3399 if (set == 0)
3400 continue;
3401
3402 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3403 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3404 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3405 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3406 }
3407 }
3408 else
3409 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3410 }
3411
3412 /* For pointer data type, suggest pointer register. */
3413 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3414 mark_reg_pointer (parmreg,
3415 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3416 }
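
/* Illustrative sketch of the REG_EQUIV case above (not part of GCC):
   for an SImode argument passed at a fixed stack offset and copied
   into pseudo 100, the copy insn ends up carrying an equivalence
   note, roughly

       (insn (set (reg:SI 100)
                  (mem:SI (plus (reg virtual-incoming-args) (const_int 4))))
             (expr_list:REG_EQUIV
                  (mem:SI (plus (reg virtual-incoming-args) (const_int 4)))))

   which lets reload discard the copy and use the stack slot directly
   when the pseudo does not get a hard register.  */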
3417
3418 /* A subroutine of assign_parms. Allocate stack space to hold the current
3419 parameter. Get it there. Perform all ABI-specified conversions. */
3420
3421 static void
3422 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3423 struct assign_parm_data_one *data)
3424 {
3425 /* Value must be stored in the stack slot STACK_PARM during function
3426 execution. */
3427 bool to_conversion = false;
3428
3429 assign_parm_remove_parallels (data);
3430
3431 if (data->promoted_mode != data->nominal_mode)
3432 {
3433 /* Conversion is required. */
3434 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3435
3436 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3437
3438 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3439 to_conversion = true;
3440
3441 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3442 TYPE_UNSIGNED (TREE_TYPE (parm)));
3443
3444 if (data->stack_parm)
3445 {
3446 poly_int64 offset
3447 = subreg_lowpart_offset (data->nominal_mode,
3448 GET_MODE (data->stack_parm));
3449 /* ??? This may need a big-endian conversion on sparc64. */
3450 data->stack_parm
3451 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3452 if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3453 set_mem_offset (data->stack_parm,
3454 MEM_OFFSET (data->stack_parm) + offset);
3455 }
3456 }
3457
3458 if (data->entry_parm != data->stack_parm)
3459 {
3460 rtx src, dest;
3461
3462 if (data->stack_parm == 0)
3463 {
3464 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3465 GET_MODE (data->entry_parm),
3466 TYPE_ALIGN (data->passed_type));
3467 data->stack_parm
3468 = assign_stack_local (GET_MODE (data->entry_parm),
3469 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3470 align);
3471 set_mem_attributes (data->stack_parm, parm, 1);
3472 }
3473
3474 dest = validize_mem (copy_rtx (data->stack_parm));
3475 src = validize_mem (copy_rtx (data->entry_parm));
3476
3477 if (MEM_P (src))
3478 {
3479 /* Use a block move to handle potentially misaligned entry_parm. */
3480 if (!to_conversion)
3481 push_to_sequence2 (all->first_conversion_insn,
3482 all->last_conversion_insn);
3483 to_conversion = true;
3484
3485 emit_block_move (dest, src,
3486 GEN_INT (int_size_in_bytes (data->passed_type)),
3487 BLOCK_OP_NORMAL);
3488 }
3489 else
3490 {
3491 if (!REG_P (src))
3492 src = force_reg (GET_MODE (src), src);
3493 emit_move_insn (dest, src);
3494 }
3495 }
3496
3497 if (to_conversion)
3498 {
3499 all->first_conversion_insn = get_insns ();
3500 all->last_conversion_insn = get_last_insn ();
3501 end_sequence ();
3502 }
3503
3504 set_parm_rtl (parm, data->stack_parm);
3505 }
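
/* Illustrative sketch (not derived from any particular target): on a
   machine that promotes QImode arguments to SImode, the conversion
   branch above emits approximately

       (set (reg:SI tmp) (reg:SI entry))            ; copy the entry parm
       (set (mem:QI stack_slot) (subreg:QI tmp 0))  ; truncate into the slot

   with the second insn placed on the deferred conversion sequence so
   that it runs only after every parameter has been moved out of its
   incoming hard register.  */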
3506
3507 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3508 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3509
3510 static void
3511 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3512 vec<tree> fnargs)
3513 {
3514 tree parm;
3515 tree orig_fnargs = all->orig_fnargs;
3516 unsigned i = 0;
3517
3518 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3519 {
3520 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3521 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3522 {
3523 rtx tmp, real, imag;
3524 scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3525
3526 real = DECL_RTL (fnargs[i]);
3527 imag = DECL_RTL (fnargs[i + 1]);
3528 if (inner != GET_MODE (real))
3529 {
3530 real = gen_lowpart_SUBREG (inner, real);
3531 imag = gen_lowpart_SUBREG (inner, imag);
3532 }
3533
3534 if (TREE_ADDRESSABLE (parm))
3535 {
3536 rtx rmem, imem;
3537 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3538 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3539 DECL_MODE (parm),
3540 TYPE_ALIGN (TREE_TYPE (parm)));
3541
3542 /* split_complex_arg put the real and imag parts in
3543 pseudos. Move them to memory. */
3544 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3545 set_mem_attributes (tmp, parm, 1);
3546 rmem = adjust_address_nv (tmp, inner, 0);
3547 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3548 push_to_sequence2 (all->first_conversion_insn,
3549 all->last_conversion_insn);
3550 emit_move_insn (rmem, real);
3551 emit_move_insn (imem, imag);
3552 all->first_conversion_insn = get_insns ();
3553 all->last_conversion_insn = get_last_insn ();
3554 end_sequence ();
3555 }
3556 else
3557 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3558 set_parm_rtl (parm, tmp);
3559
3560 real = DECL_INCOMING_RTL (fnargs[i]);
3561 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3562 if (inner != GET_MODE (real))
3563 {
3564 real = gen_lowpart_SUBREG (inner, real);
3565 imag = gen_lowpart_SUBREG (inner, imag);
3566 }
3567 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3568 set_decl_incoming_rtl (parm, tmp, false);
3569 i++;
3570 }
3571 }
3572 }
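
/* Example (hypothetical user code, not part of GCC): if the target's
   split_complex_arg hook is set, a declaration such as

       double norm2 (_Complex double z);

   arrives as two scalar DFmode arguments.  The function above rebuilds
   DECL_RTL (z) as (concat:DC real imag) when Z is not addressable, or
   moves both halves into one freshly allocated stack slot when it is.  */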
3573
3574 /* Load bounds of PARM from bounds table. */
3575 static void
3576 assign_parm_load_bounds (struct assign_parm_data_one *data,
3577 tree parm,
3578 rtx entry,
3579 unsigned bound_no)
3580 {
3581 bitmap_iterator bi;
3582 unsigned i, offs = 0;
3583 int bnd_no = -1;
3584 rtx slot = NULL, ptr = NULL;
3585
3586 if (parm)
3587 {
3588 bitmap slots;
3589 bitmap_obstack_initialize (NULL);
3590 slots = BITMAP_ALLOC (NULL);
3591 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3592 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3593 {
3594 if (bound_no)
3595 bound_no--;
3596 else
3597 {
3598 bnd_no = i;
3599 break;
3600 }
3601 }
3602 BITMAP_FREE (slots);
3603 bitmap_obstack_release (NULL);
3604 }
3605
3606 /* We may have bounds not associated with any pointer. */
3607 if (bnd_no != -1)
3608 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3609
3610 /* Find associated pointer. */
3611 if (bnd_no == -1)
3612 {
3613 /* If bounds are not associated with any pointer,
3614 then they are passed in a register or special slot. */
3615 gcc_assert (data->entry_parm);
3616 ptr = const0_rtx;
3617 }
3618 else if (MEM_P (entry))
3619 slot = adjust_address (entry, Pmode, offs);
3620 else if (REG_P (entry))
3621 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3622 else if (GET_CODE (entry) == PARALLEL)
3623 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3624 else
3625 gcc_unreachable ();
3626 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3627 data->entry_parm);
3628 }
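
/* Worked example (illustrative): with 64-bit pointers, the bounds for
   the pointer occupying bound slot 2 of an aggregate argument live at
   byte offset 2 * 64 / 8 == 16 within ENTRY, so when ENTRY is a MEM,
   SLOT becomes a Pmode reference 16 bytes into it.  */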
3629
3630 /* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3631
3632 static void
3633 assign_bounds (vec<bounds_parm_data> &bndargs,
3634 struct assign_parm_data_all &all,
3635 bool assign_regs, bool assign_special,
3636 bool assign_bt)
3637 {
3638 unsigned i, pass;
3639 bounds_parm_data *pbdata;
3640
3641 if (!bndargs.exists ())
3642 return;
3643
3644 /* We make several passes to store the input bounds. First we handle
3645 bounds passed in registers. Then we load bounds passed in special
3646 slots. Finally we load bounds from the Bounds Table. */
3647 for (pass = 0; pass < 3; pass++)
3648 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3649 {
3650 /* Pass 0 => regs only. */
3651 if (pass == 0
3652 && (!assign_regs
3653 || (!pbdata->parm_data.entry_parm
3654 || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
3655 continue;
3656 /* Pass 1 => slots only. */
3657 else if (pass == 1
3658 && (!assign_special
3659 || (!pbdata->parm_data.entry_parm
3660 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
3661 continue;
3662 /* Pass 2 => BT only. */
3663 else if (pass == 2
3664 && (!assign_bt
3665 || pbdata->parm_data.entry_parm))
3666 continue;
3667
3668 if (!pbdata->parm_data.entry_parm
3669 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3670 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3671 pbdata->ptr_entry, pbdata->bound_no);
3672
3673 set_decl_incoming_rtl (pbdata->bounds_parm,
3674 pbdata->parm_data.entry_parm, false);
3675
3676 if (assign_parm_setup_block_p (&pbdata->parm_data))
3677 assign_parm_setup_block (&all, pbdata->bounds_parm,
3678 &pbdata->parm_data);
3679 else if (pbdata->parm_data.passed_pointer
3680 || use_register_for_decl (pbdata->bounds_parm))
3681 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3682 &pbdata->parm_data);
3683 else
3684 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3685 &pbdata->parm_data);
3686 }
3687 }
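
/* Usage sketch, grounded in assign_parms below: a normal instrumented
   function makes one call that runs all three passes,

       assign_bounds (bndargs, all, true, true, true);

   while a stdarg function first assigns register bounds, lets the
   target set up the varargs area, and only then runs the slot and
   Bounds Table passes:

       assign_bounds (bndargs, all, true, false, false);
       targetm.calls.setup_incoming_vararg_bounds (...);
       assign_bounds (bndargs, all, false, true, true);  */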
3688
3689 /* Assign RTL expressions to the function's parameters. This may involve
3690 copying them into registers and using those registers as the DECL_RTL. */
3691
3692 static void
3693 assign_parms (tree fndecl)
3694 {
3695 struct assign_parm_data_all all;
3696 tree parm;
3697 vec<tree> fnargs;
3698 unsigned i, bound_no = 0;
3699 tree last_arg = NULL;
3700 rtx last_arg_entry = NULL;
3701 vec<bounds_parm_data> bndargs = vNULL;
3702 bounds_parm_data bdata;
3703
3704 crtl->args.internal_arg_pointer
3705 = targetm.calls.internal_arg_pointer ();
3706
3707 assign_parms_initialize_all (&all);
3708 fnargs = assign_parms_augmented_arg_list (&all);
3709
3710 FOR_EACH_VEC_ELT (fnargs, i, parm)
3711 {
3712 struct assign_parm_data_one data;
3713
3714 /* Extract the type of PARM; adjust it according to ABI. */
3715 assign_parm_find_data_types (&all, parm, &data);
3716
3717 /* Early out for errors and void parameters. */
3718 if (data.passed_mode == VOIDmode)
3719 {
3720 SET_DECL_RTL (parm, const0_rtx);
3721 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3722 continue;
3723 }
3724
3725 /* Estimate stack alignment from parameter alignment. */
3726 if (SUPPORTS_STACK_ALIGNMENT)
3727 {
3728 unsigned int align
3729 = targetm.calls.function_arg_boundary (data.promoted_mode,
3730 data.passed_type);
3731 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3732 align);
3733 if (TYPE_ALIGN (data.nominal_type) > align)
3734 align = MINIMUM_ALIGNMENT (data.nominal_type,
3735 TYPE_MODE (data.nominal_type),
3736 TYPE_ALIGN (data.nominal_type));
3737 if (crtl->stack_alignment_estimated < align)
3738 {
3739 gcc_assert (!crtl->stack_realign_processed);
3740 crtl->stack_alignment_estimated = align;
3741 }
3742 }
3743
3744 /* Find out where the parameter arrives in this function. */
3745 assign_parm_find_entry_rtl (&all, &data);
3746
3747 /* Find out where stack space for this parameter might be. */
3748 if (assign_parm_is_stack_parm (&all, &data))
3749 {
3750 assign_parm_find_stack_rtl (parm, &data);
3751 assign_parm_adjust_entry_rtl (&data);
3752 }
3753 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3754 {
3755 /* Remember where the last non-bounds arg was passed in case
3756 we have to load associated bounds for it from the Bounds
3757 Table. */
3758 last_arg = parm;
3759 last_arg_entry = data.entry_parm;
3760 bound_no = 0;
3761 }
3762 /* Record permanently how this parm was passed. */
3763 if (data.passed_pointer)
3764 {
3765 rtx incoming_rtl
3766 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3767 data.entry_parm);
3768 set_decl_incoming_rtl (parm, incoming_rtl, true);
3769 }
3770 else
3771 set_decl_incoming_rtl (parm, data.entry_parm, false);
3772
3773 assign_parm_adjust_stack_rtl (&data);
3774
3775 /* Bounds must be loaded in a particular order for registers
3776 to be allocated correctly. Collect info about the input
3777 bounds here and load them later. */
3778 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3779 {
3780 /* Expect bounds in instrumented functions only. */
3781 gcc_assert (chkp_function_instrumented_p (fndecl));
3782
3783 bdata.parm_data = data;
3784 bdata.bounds_parm = parm;
3785 bdata.ptr_parm = last_arg;
3786 bdata.ptr_entry = last_arg_entry;
3787 bdata.bound_no = bound_no;
3788 bndargs.safe_push (bdata);
3789 }
3790 else
3791 {
3792 if (assign_parm_setup_block_p (&data))
3793 assign_parm_setup_block (&all, parm, &data);
3794 else if (data.passed_pointer || use_register_for_decl (parm))
3795 assign_parm_setup_reg (&all, parm, &data);
3796 else
3797 assign_parm_setup_stack (&all, parm, &data);
3798 }
3799
3800 if (cfun->stdarg && !DECL_CHAIN (parm))
3801 {
3802 int pretend_bytes = 0;
3803
3804 assign_parms_setup_varargs (&all, &data, false);
3805
3806 if (chkp_function_instrumented_p (fndecl))
3807 {
3808 /* We expect this to be the last parm; otherwise it would be
3809 wrong to assign bounds right now. */
3810 gcc_assert (i == (fnargs.length () - 1));
3811 assign_bounds (bndargs, all, true, false, false);
3812 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3813 data.promoted_mode,
3814 data.passed_type,
3815 &pretend_bytes,
3816 false);
3817 assign_bounds (bndargs, all, false, true, true);
3818 bndargs.release ();
3819 }
3820 }
3821
3822 /* Update info on where next arg arrives in registers. */
3823 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3824 data.passed_type, data.named_arg);
3825
3826 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3827 bound_no++;
3828 }
3829
3830 assign_bounds (bndargs, all, true, true, true);
3831 bndargs.release ();
3832
3833 if (targetm.calls.split_complex_arg)
3834 assign_parms_unsplit_complex (&all, fnargs);
3835
3836 fnargs.release ();
3837
3838 /* Output all parameter conversion instructions (possibly including calls)
3839 now that all parameters have been copied out of hard registers. */
3840 emit_insn (all.first_conversion_insn);
3841
3842 /* Estimate reload stack alignment from scalar return mode. */
3843 if (SUPPORTS_STACK_ALIGNMENT)
3844 {
3845 if (DECL_RESULT (fndecl))
3846 {
3847 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3848 machine_mode mode = TYPE_MODE (type);
3849
3850 if (mode != BLKmode
3851 && mode != VOIDmode
3852 && !AGGREGATE_TYPE_P (type))
3853 {
3854 unsigned int align = GET_MODE_ALIGNMENT (mode);
3855 if (crtl->stack_alignment_estimated < align)
3856 {
3857 gcc_assert (!crtl->stack_realign_processed);
3858 crtl->stack_alignment_estimated = align;
3859 }
3860 }
3861 }
3862 }
3863
3864 /* If we are receiving a struct value address as the first argument, set up
3865 the RTL for the function result. As this might require code to convert
3866 the transmitted address to Pmode, we do this here to ensure that possible
3867 preliminary conversions of the address have been emitted already. */
3868 if (all.function_result_decl)
3869 {
3870 tree result = DECL_RESULT (current_function_decl);
3871 rtx addr = DECL_RTL (all.function_result_decl);
3872 rtx x;
3873
3874 if (DECL_BY_REFERENCE (result))
3875 {
3876 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3877 x = addr;
3878 }
3879 else
3880 {
3881 SET_DECL_VALUE_EXPR (result,
3882 build1 (INDIRECT_REF, TREE_TYPE (result),
3883 all.function_result_decl));
3884 addr = convert_memory_address (Pmode, addr);
3885 x = gen_rtx_MEM (DECL_MODE (result), addr);
3886 set_mem_attributes (x, result, 1);
3887 }
3888
3889 DECL_HAS_VALUE_EXPR_P (result) = 1;
3890
3891 set_parm_rtl (result, x);
3892 }
3893
3894 /* We have aligned all the args, so add space for the pretend args. */
3895 crtl->args.pretend_args_size = all.pretend_args_size;
3896 all.stack_args_size.constant += all.extra_pretend_bytes;
3897 crtl->args.size = all.stack_args_size.constant;
3898
3899 /* Adjust function incoming argument size for alignment and
3900 minimum length. */
3901
3902 crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3903 crtl->args.size = aligned_upper_bound (crtl->args.size,
3904 PARM_BOUNDARY / BITS_PER_UNIT);
3905
3906 if (ARGS_GROW_DOWNWARD)
3907 {
3908 crtl->args.arg_offset_rtx
3909 = (all.stack_args_size.var == 0
3910 ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3911 : expand_expr (size_diffop (all.stack_args_size.var,
3912 size_int (-all.stack_args_size.constant)),
3913 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3914 }
3915 else
3916 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3917
3918 /* See how many bytes, if any, of its args a function should try to pop
3919 on return. */
3920
3921 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3922 TREE_TYPE (fndecl),
3923 crtl->args.size);
3924
3925 /* For a stdarg.h function, save info about
3926 the regs and stack space used by the named args. */
3927
3928 crtl->args.info = all.args_so_far_v;
3929
3930 /* Set the rtx used for the function return value. Put this in its
3931 own variable so any optimizers that need this information don't have
3932 to include tree.h. Do this here so it gets done when an inlined
3933 function gets output. */
3934
3935 crtl->return_rtx
3936 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3937 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3938
3939 /* If scalar return value was computed in a pseudo-reg, or was a named
3940 return value that got dumped to the stack, copy that to the hard
3941 return register. */
3942 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3943 {
3944 tree decl_result = DECL_RESULT (fndecl);
3945 rtx decl_rtl = DECL_RTL (decl_result);
3946
3947 if (REG_P (decl_rtl)
3948 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3949 : DECL_REGISTER (decl_result))
3950 {
3951 rtx real_decl_rtl;
3952
3953 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3954 fndecl, true);
3955 if (chkp_function_instrumented_p (fndecl))
3956 crtl->return_bnd
3957 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3958 fndecl, true);
3959 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3960 /* The delay slot scheduler assumes that crtl->return_rtx
3961 holds the hard register containing the return value, not a
3962 temporary pseudo. */
3963 crtl->return_rtx = real_decl_rtl;
3964 }
3965 }
3966 }
3967
3968 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3969 For all seen types, gimplify their sizes. */
3970
3971 static tree
3972 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3973 {
3974 tree t = *tp;
3975
3976 *walk_subtrees = 0;
3977 if (TYPE_P (t))
3978 {
3979 if (POINTER_TYPE_P (t))
3980 *walk_subtrees = 1;
3981 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3982 && !TYPE_SIZES_GIMPLIFIED (t))
3983 {
3984 gimplify_type_sizes (t, (gimple_seq *) data);
3985 *walk_subtrees = 1;
3986 }
3987 }
3988
3989 return NULL;
3990 }
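
/* Example (illustrative): for a parameter declared as

       void f (int n, int (*a)[n]);

   the walk descends through the pointer type and gimplifies the
   SAVE_EXPR-carrying size of the variable-length array type into the
   statement sequence passed via DATA.  */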
3991
3992 /* Gimplify the parameter list for current_function_decl. This involves
3993 evaluating SAVE_EXPRs of variable sized parameters and generating code
3994 to implement callee-copies reference parameters. Returns a sequence of
3995 statements to add to the beginning of the function. */
3996
3997 gimple_seq
3998 gimplify_parameters (gimple_seq *cleanup)
3999 {
4000 struct assign_parm_data_all all;
4001 tree parm;
4002 gimple_seq stmts = NULL;
4003 vec<tree> fnargs;
4004 unsigned i;
4005
4006 assign_parms_initialize_all (&all);
4007 fnargs = assign_parms_augmented_arg_list (&all);
4008
4009 FOR_EACH_VEC_ELT (fnargs, i, parm)
4010 {
4011 struct assign_parm_data_one data;
4012
4013 /* Extract the type of PARM; adjust it according to ABI. */
4014 assign_parm_find_data_types (&all, parm, &data);
4015
4016 /* Early out for errors and void parameters. */
4017 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
4018 continue;
4019
4020 /* Update info on where next arg arrives in registers. */
4021 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
4022 data.passed_type, data.named_arg);
4023
4024 /* ??? Once upon a time variable_size stuffed parameter list
4025 SAVE_EXPRs (amongst others) onto a pending sizes list. This
4026 turned out to be less than manageable in the gimple world.
4027 Now we have to hunt them down ourselves. */
4028 walk_tree_without_duplicates (&data.passed_type,
4029 gimplify_parm_type, &stmts);
4030
4031 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4032 {
4033 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
4034 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
4035 }
4036
4037 if (data.passed_pointer)
4038 {
4039 tree type = TREE_TYPE (data.passed_type);
4040 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4041 type, data.named_arg))
4042 {
4043 tree local, t;
4044
4045 /* For constant-sized objects, this is trivial; for
4046 variable-sized objects, we have to play games. */
4047 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
4048 && !(flag_stack_check == GENERIC_STACK_CHECK
4049 && compare_tree_int (DECL_SIZE_UNIT (parm),
4050 STACK_CHECK_MAX_VAR_SIZE) > 0))
4051 {
4052 local = create_tmp_var (type, get_name (parm));
4053 DECL_IGNORED_P (local) = 0;
4054 /* If PARM was addressable, move that flag over
4055 to the local copy, as its address will be taken,
4056 not the PARM's. Keep the PARM's address-taken
4057 flag, as we'll query it during gimplification. */
4058 if (TREE_ADDRESSABLE (parm))
4059 TREE_ADDRESSABLE (local) = 1;
4060 else if (TREE_CODE (type) == COMPLEX_TYPE
4061 || TREE_CODE (type) == VECTOR_TYPE)
4062 DECL_GIMPLE_REG_P (local) = 1;
4063
4064 if (!is_gimple_reg (local)
4065 && flag_stack_reuse != SR_NONE)
4066 {
4067 tree clobber = build_constructor (type, NULL);
4068 gimple *clobber_stmt;
4069 TREE_THIS_VOLATILE (clobber) = 1;
4070 clobber_stmt = gimple_build_assign (local, clobber);
4071 gimple_seq_add_stmt (cleanup, clobber_stmt);
4072 }
4073 }
4074 else
4075 {
4076 tree ptr_type, addr;
4077
4078 ptr_type = build_pointer_type (type);
4079 addr = create_tmp_reg (ptr_type, get_name (parm));
4080 DECL_IGNORED_P (addr) = 0;
4081 local = build_fold_indirect_ref (addr);
4082
4083 t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
4084 DECL_ALIGN (parm),
4085 max_int_size_in_bytes (type));
4086 /* The call has been built for a variable-sized object. */
4087 CALL_ALLOCA_FOR_VAR_P (t) = 1;
4088 t = fold_convert (ptr_type, t);
4089 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4090 gimplify_and_add (t, &stmts);
4091 }
4092
4093 gimplify_assign (local, parm, &stmts);
4094
4095 SET_DECL_VALUE_EXPR (parm, local);
4096 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4097 }
4098 }
4099 }
4100
4101 fnargs.release ();
4102
4103 return stmts;
4104 }
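
/* Illustrative transformation (hypothetical source, not part of GCC):
   on a target where reference_callee_copied returns true for large
   aggregates, a function

       void f (struct big s) { use (&s); }

   is conceptually rewritten so the parameter's DECL_VALUE_EXPR is a
   callee-made copy:

       void f (struct big *s_arg)
       {
         struct big s_local = *s_arg;   // emitted into STMTS
         use (&s_local);
       }

   For variable-sized types the copy instead lives in storage obtained
   by an alloca call marked CALL_ALLOCA_FOR_VAR_P.  */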
4105
4106 /* Compute the size and offset from the start of the stacked arguments for a
4107 parm passed in mode PASSED_MODE and with type TYPE.
4108
4109 INITIAL_OFFSET_PTR points to the current offset into the stacked
4110 arguments.
4111
4112 The starting offset and size for this parm are returned in
4113 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
4114 nonzero, the offset is that of stack slot, which is returned in
4115 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
4116 padding required from the initial offset ptr to the stack slot.
4117
4118 IN_REGS is nonzero if the argument will be passed in registers. It will
4119 never be set if REG_PARM_STACK_SPACE is not defined.
4120
4121 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4122 for arguments which are passed in registers.
4123
4124 FNDECL is the function in which the argument was defined.
4125
4126 There are two types of rounding that are done. The first, controlled by
4127 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4128 argument list to be aligned to the specific boundary (in bits). This
4129 rounding affects the initial and starting offsets, but not the argument
4130 size.
4131
4132 The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4133 optionally rounds the size of the parm to PARM_BOUNDARY. The
4134 initial offset is not affected by this rounding, while the size always
4135 is and the starting offset may be. */
4136
4137 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
4138 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4139 callers pass in the total size of args so far as
4140 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
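
/* Worked example (assuming PARM_BOUNDARY == 64 and a 64-bit argument
   boundary): a 6-byte argument at initial offset 4 is first aligned,
   pushing its slot offset to 8, and then its size is rounded up to 8
   bytes, so the following argument starts at offset 16.  Whether the
   2 bytes of size padding sit above or below the value is decided by
   TARGET_FUNCTION_ARG_PADDING.  */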
4141
4142 void
4143 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4144 int reg_parm_stack_space, int partial,
4145 tree fndecl ATTRIBUTE_UNUSED,
4146 struct args_size *initial_offset_ptr,
4147 struct locate_and_pad_arg_data *locate)
4148 {
4149 tree sizetree;
4150 pad_direction where_pad;
4151 unsigned int boundary, round_boundary;
4152 int part_size_in_regs;
4153
4154 /* If we have found a stack parm before we reach the end of the
4155 area reserved for registers, skip that area. */
4156 if (! in_regs)
4157 {
4158 if (reg_parm_stack_space > 0)
4159 {
4160 if (initial_offset_ptr->var
4161 || !ordered_p (initial_offset_ptr->constant,
4162 reg_parm_stack_space))
4163 {
4164 initial_offset_ptr->var
4165 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4166 ssize_int (reg_parm_stack_space));
4167 initial_offset_ptr->constant = 0;
4168 }
4169 else
4170 initial_offset_ptr->constant
4171 = ordered_max (initial_offset_ptr->constant,
4172 reg_parm_stack_space);
4173 }
4174 }
4175
4176 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4177
4178 sizetree = (type
4179 ? arg_size_in_bytes (type)
4180 : size_int (GET_MODE_SIZE (passed_mode)));
4181 where_pad = targetm.calls.function_arg_padding (passed_mode, type);
4182 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4183 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4184 type);
4185 locate->where_pad = where_pad;
4186
4187 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4188 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4189 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4190
4191 locate->boundary = boundary;
4192
4193 if (SUPPORTS_STACK_ALIGNMENT)
4194 {
4195 /* stack_alignment_estimated can't change after stack has been
4196 realigned. */
4197 if (crtl->stack_alignment_estimated < boundary)
4198 {
4199 if (!crtl->stack_realign_processed)
4200 crtl->stack_alignment_estimated = boundary;
4201 else
4202 {
4203 /* If stack is realigned and stack alignment value
4204 hasn't been finalized, it is OK not to increase
4205 stack_alignment_estimated. The bigger alignment
4206 requirement is recorded in stack_alignment_needed
4207 below. */
4208 gcc_assert (!crtl->stack_realign_finalized
4209 && crtl->stack_realign_needed);
4210 }
4211 }
4212 }
4213
4214 /* Remember if the outgoing parameter requires extra alignment on the
4215 calling function side. */
4216 if (crtl->stack_alignment_needed < boundary)
4217 crtl->stack_alignment_needed = boundary;
4218 if (crtl->preferred_stack_boundary < boundary)
4219 crtl->preferred_stack_boundary = boundary;
4220
4221 if (ARGS_GROW_DOWNWARD)
4222 {
4223 locate->slot_offset.constant = -initial_offset_ptr->constant;
4224 if (initial_offset_ptr->var)
4225 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4226 initial_offset_ptr->var);
4227
4228 {
4229 tree s2 = sizetree;
4230 if (where_pad != PAD_NONE
4231 && (!tree_fits_uhwi_p (sizetree)
4232 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4233 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4234 SUB_PARM_SIZE (locate->slot_offset, s2);
4235 }
4236
4237 locate->slot_offset.constant += part_size_in_regs;
4238
4239 if (!in_regs || reg_parm_stack_space > 0)
4240 pad_to_arg_alignment (&locate->slot_offset, boundary,
4241 &locate->alignment_pad);
4242
4243 locate->size.constant = (-initial_offset_ptr->constant
4244 - locate->slot_offset.constant);
4245 if (initial_offset_ptr->var)
4246 locate->size.var = size_binop (MINUS_EXPR,
4247 size_binop (MINUS_EXPR,
4248 ssize_int (0),
4249 initial_offset_ptr->var),
4250 locate->slot_offset.var);
4251
4252 /* Pad_below needs the pre-rounded size to know how much to pad
4253 below. */
4254 locate->offset = locate->slot_offset;
4255 if (where_pad == PAD_DOWNWARD)
4256 pad_below (&locate->offset, passed_mode, sizetree);
4257
4258 }
4259 else
4260 {
4261 if (!in_regs || reg_parm_stack_space > 0)
4262 pad_to_arg_alignment (initial_offset_ptr, boundary,
4263 &locate->alignment_pad);
4264 locate->slot_offset = *initial_offset_ptr;
4265
4266 #ifdef PUSH_ROUNDING
4267 if (passed_mode != BLKmode)
4268 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4269 #endif
4270
4271 /* Pad_below needs the pre-rounded size to know how much to pad below
4272 so this must be done before rounding up. */
4273 locate->offset = locate->slot_offset;
4274 if (where_pad == PAD_DOWNWARD)
4275 pad_below (&locate->offset, passed_mode, sizetree);
4276
4277 if (where_pad != PAD_NONE
4278 && (!tree_fits_uhwi_p (sizetree)
4279 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4280 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4281
4282 ADD_PARM_SIZE (locate->size, sizetree);
4283
4284 locate->size.constant -= part_size_in_regs;
4285 }
4286
4287 locate->offset.constant
4288 += targetm.calls.function_arg_offset (passed_mode, type);
4289 }
4290
4291 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4292 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4293
4294 static void
4295 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4296 struct args_size *alignment_pad)
4297 {
4298 tree save_var = NULL_TREE;
4299 poly_int64 save_constant = 0;
4300 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4301 poly_int64 sp_offset = STACK_POINTER_OFFSET;
4302
4303 #ifdef SPARC_STACK_BOUNDARY_HACK
4304 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4305 the real alignment of %sp. However, when it does this, the
4306 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4307 if (SPARC_STACK_BOUNDARY_HACK)
4308 sp_offset = 0;
4309 #endif
4310
4311 if (boundary > PARM_BOUNDARY)
4312 {
4313 save_var = offset_ptr->var;
4314 save_constant = offset_ptr->constant;
4315 }
4316
4317 alignment_pad->var = NULL_TREE;
4318 alignment_pad->constant = 0;
4319
4320 if (boundary > BITS_PER_UNIT)
4321 {
4322 int misalign;
4323 if (offset_ptr->var
4324 || !known_misalignment (offset_ptr->constant + sp_offset,
4325 boundary_in_bytes, &misalign))
4326 {
4327 tree sp_offset_tree = ssize_int (sp_offset);
4328 tree offset = size_binop (PLUS_EXPR,
4329 ARGS_SIZE_TREE (*offset_ptr),
4330 sp_offset_tree);
4331 tree rounded;
4332 if (ARGS_GROW_DOWNWARD)
4333 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4334 else
4335 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4336
4337 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4338 /* ARGS_SIZE_TREE includes constant term. */
4339 offset_ptr->constant = 0;
4340 if (boundary > PARM_BOUNDARY)
4341 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4342 save_var);
4343 }
4344 else
4345 {
4346 if (ARGS_GROW_DOWNWARD)
4347 offset_ptr->constant -= misalign;
4348 else
4349 offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4350
4351 if (boundary > PARM_BOUNDARY)
4352 alignment_pad->constant = offset_ptr->constant - save_constant;
4353 }
4354 }
4355 }
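
/* Worked example (illustrative): with BOUNDARY == 128 bits, a zero
   STACK_POINTER_OFFSET and a known constant offset of 20, the
   misalignment is 20 % 16 == 4; for upward-growing arguments the
   offset becomes 20 + (-4 & 15) == 32, and since 128 exceeds a
   typical PARM_BOUNDARY of 64, ALIGNMENT_PAD records the 12 bytes
   of padding.  */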
4356
4357 static void
4358 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4359 {
4360 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4361 int misalign;
4362 if (passed_mode != BLKmode
4363 && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4364 offset_ptr->constant += -misalign & (align - 1);
4365 else
4366 {
4367 if (TREE_CODE (sizetree) != INTEGER_CST
4368 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4369 {
4370 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4371 tree s2 = round_up (sizetree, align);
4372 /* Add it in. */
4373 ADD_PARM_SIZE (*offset_ptr, s2);
4374 SUB_PARM_SIZE (*offset_ptr, sizetree);
4375 }
4376 }
4377 }
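
/* Example (illustrative): with PARM_BOUNDARY == 32, a 2-byte HImode
   argument padded downward has -2 & 3 == 2 bytes added to its offset,
   so the value starts 2 bytes into its 4-byte slot.  */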
4378
4379
4380 /* True if register REGNO was alive at a place where `setjmp' was
4381 called and was set more than once or is an argument. Such regs may
4382 be clobbered by `longjmp'. */
4383
4384 static bool
4385 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4386 {
4387 /* There appear to be cases where some local vars never reach the
4388 backend but have bogus regnos. */
4389 if (regno >= max_reg_num ())
4390 return false;
4391
4392 return ((REG_N_SETS (regno) > 1
4393 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4394 regno))
4395 && REGNO_REG_SET_P (setjmp_crosses, regno));
4396 }
4397
4398 /* Walk the tree of blocks describing the binding levels within a
4399 function and warn about variables that might be clobbered by setjmp
4400 or vfork. This is done after flow analysis and before register
4401 allocation, since register allocation will map the pseudo-regs
4402 to hard regs. */
4403
4404 static void
4405 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4406 {
4407 tree decl, sub;
4408
4409 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4410 {
4411 if (VAR_P (decl)
4412 && DECL_RTL_SET_P (decl)
4413 && REG_P (DECL_RTL (decl))
4414 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4415 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4416 " %<longjmp%> or %<vfork%>", decl);
4417 }
4418
4419 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4420 setjmp_vars_warning (setjmp_crosses, sub);
4421 }
4422
4423 /* Do the appropriate part of setjmp_vars_warning
4424 but for arguments instead of local variables. */
4425
4426 static void
4427 setjmp_args_warning (bitmap setjmp_crosses)
4428 {
4429 tree decl;
4430 for (decl = DECL_ARGUMENTS (current_function_decl);
4431 decl; decl = DECL_CHAIN (decl))
4432 if (DECL_RTL (decl) != 0
4433 && REG_P (DECL_RTL (decl))
4434 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4435 warning (OPT_Wclobbered,
4436 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4437 decl);
4438 }
4439
4440 /* Generate warning messages for variables live across setjmp. */
4441
4442 void
4443 generate_setjmp_warnings (void)
4444 {
4445 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4446
4447 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4448 || bitmap_empty_p (setjmp_crosses))
4449 return;
4450
4451 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4452 setjmp_args_warning (setjmp_crosses);
4453 }
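
/* Hypothetical user code that triggers the warnings above (for
   illustration only):

       #include <setjmp.h>
       jmp_buf buf;
       int f (void)
       {
         int x = 1;                // may live in a register
         if (setjmp (buf))
           return x;               // value of x is indeterminate here
         x = 2;                    // set again after setjmp
         longjmp (buf, 1);
       }

   With -Wclobbered, X is set more than once and lives in a pseudo
   that crosses the setjmp, so it earns "variable 'x' might be
   clobbered by 'longjmp' or 'vfork'".  */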
4454
4455
4456 /* Reverse the order of elements in the fragment chain T of blocks,
4457 and return the new head of the chain (old last element).
4458 In addition, clear BLOCK_SAME_RANGE flags when needed
4459 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4460 its super fragment origin. */
4461
4462 static tree
4463 block_fragments_nreverse (tree t)
4464 {
4465 tree prev = 0, block, next, prev_super = 0;
4466 tree super = BLOCK_SUPERCONTEXT (t);
4467 if (BLOCK_FRAGMENT_ORIGIN (super))
4468 super = BLOCK_FRAGMENT_ORIGIN (super);
4469 for (block = t; block; block = next)
4470 {
4471 next = BLOCK_FRAGMENT_CHAIN (block);
4472 BLOCK_FRAGMENT_CHAIN (block) = prev;
4473 if ((prev && !BLOCK_SAME_RANGE (prev))
4474 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4475 != prev_super))
4476 BLOCK_SAME_RANGE (block) = 0;
4477 prev_super = BLOCK_SUPERCONTEXT (block);
4478 BLOCK_SUPERCONTEXT (block) = super;
4479 prev = block;
4480 }
4481 t = BLOCK_FRAGMENT_ORIGIN (t);
4482 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4483 != prev_super)
4484 BLOCK_SAME_RANGE (t) = 0;
4485 BLOCK_SUPERCONTEXT (t) = super;
4486 return prev;
4487 }
4488
4489 /* Reverse the order of elements in the chain T of blocks,
4490 and return the new head of the chain (old last element).
4491 Also do the same on subblocks and reverse the order of elements
4492 in BLOCK_FRAGMENT_CHAIN as well. */
4493
4494 static tree
4495 blocks_nreverse_all (tree t)
4496 {
4497 tree prev = 0, block, next;
4498 for (block = t; block; block = next)
4499 {
4500 next = BLOCK_CHAIN (block);
4501 BLOCK_CHAIN (block) = prev;
4502 if (BLOCK_FRAGMENT_CHAIN (block)
4503 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4504 {
4505 BLOCK_FRAGMENT_CHAIN (block)
4506 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4507 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4508 BLOCK_SAME_RANGE (block) = 0;
4509 }
4510 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4511 prev = block;
4512 }
4513 return prev;
4514 }
4515
4516
4517 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4518 and create duplicate blocks. */
4519 /* ??? Need an option to either create block fragments or to create
4520 abstract origin duplicates of a source block. It really depends
4521 on what optimization has been performed. */
4522
4523 void
4524 reorder_blocks (void)
4525 {
4526 tree block = DECL_INITIAL (current_function_decl);
4527
4528 if (block == NULL_TREE)
4529 return;
4530
4531 auto_vec<tree, 10> block_stack;
4532
4533 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4534 clear_block_marks (block);
4535
4536 /* Prune the old trees away, so that they don't get in the way. */
4537 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4538 BLOCK_CHAIN (block) = NULL_TREE;
4539
4540 /* Recreate the block tree from the note nesting. */
4541 reorder_blocks_1 (get_insns (), block, &block_stack);
4542 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4543 }
4544
4545 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4546
4547 void
4548 clear_block_marks (tree block)
4549 {
4550 while (block)
4551 {
4552 TREE_ASM_WRITTEN (block) = 0;
4553 clear_block_marks (BLOCK_SUBBLOCKS (block));
4554 block = BLOCK_CHAIN (block);
4555 }
4556 }
4557
4558 static void
4559 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4560 vec<tree> *p_block_stack)
4561 {
4562 rtx_insn *insn;
4563 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4564
4565 for (insn = insns; insn; insn = NEXT_INSN (insn))
4566 {
4567 if (NOTE_P (insn))
4568 {
4569 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4570 {
4571 tree block = NOTE_BLOCK (insn);
4572 tree origin;
4573
4574 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4575 origin = block;
4576
4577 if (prev_end)
4578 BLOCK_SAME_RANGE (prev_end) = 0;
4579 prev_end = NULL_TREE;
4580
4581 /* If we have seen this block before, that means it now
4582 spans multiple address regions. Create a new fragment. */
4583 if (TREE_ASM_WRITTEN (block))
4584 {
4585 tree new_block = copy_node (block);
4586
4587 BLOCK_SAME_RANGE (new_block) = 0;
4588 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4589 BLOCK_FRAGMENT_CHAIN (new_block)
4590 = BLOCK_FRAGMENT_CHAIN (origin);
4591 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4592
4593 NOTE_BLOCK (insn) = new_block;
4594 block = new_block;
4595 }
4596
4597 if (prev_beg == current_block && prev_beg)
4598 BLOCK_SAME_RANGE (block) = 1;
4599
4600 prev_beg = origin;
4601
4602 BLOCK_SUBBLOCKS (block) = 0;
4603 TREE_ASM_WRITTEN (block) = 1;
4604 /* When there's only one block for the entire function,
4605 current_block == block and we mustn't do this, as it
4606 would cause infinite recursion. */
4607 if (block != current_block)
4608 {
4609 tree super;
4610 if (block != origin)
4611 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4612 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4613 (origin))
4614 == current_block);
4615 if (p_block_stack->is_empty ())
4616 super = current_block;
4617 else
4618 {
4619 super = p_block_stack->last ();
4620 gcc_assert (super == current_block
4621 || BLOCK_FRAGMENT_ORIGIN (super)
4622 == current_block);
4623 }
4624 BLOCK_SUPERCONTEXT (block) = super;
4625 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4626 BLOCK_SUBBLOCKS (current_block) = block;
4627 current_block = origin;
4628 }
4629 p_block_stack->safe_push (block);
4630 }
4631 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4632 {
4633 NOTE_BLOCK (insn) = p_block_stack->pop ();
4634 current_block = BLOCK_SUPERCONTEXT (current_block);
4635 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4636 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4637 prev_beg = NULL_TREE;
4638 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4639 ? NOTE_BLOCK (insn) : NULL_TREE;
4640 }
4641 }
4642 else
4643 {
4644 prev_beg = NULL_TREE;
4645 if (prev_end)
4646 BLOCK_SAME_RANGE (prev_end) = 0;
4647 prev_end = NULL_TREE;
4648 }
4649 }
4650 }
4651
4652 /* Reverse the order of elements in the chain T of blocks,
4653 and return the new head of the chain (old last element). */
4654
4655 tree
4656 blocks_nreverse (tree t)
4657 {
4658 tree prev = 0, block, next;
4659 for (block = t; block; block = next)
4660 {
4661 next = BLOCK_CHAIN (block);
4662 BLOCK_CHAIN (block) = prev;
4663 prev = block;
4664 }
4665 return prev;
4666 }
4667
4668 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4669 by modifying the last node in chain 1 to point to chain 2. */
4670
4671 tree
4672 block_chainon (tree op1, tree op2)
4673 {
4674 tree t1;
4675
4676 if (!op1)
4677 return op2;
4678 if (!op2)
4679 return op1;
4680
4681 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4682 continue;
4683 BLOCK_CHAIN (t1) = op2;
4684
4685 #ifdef ENABLE_TREE_CHECKING
4686 {
4687 tree t2;
4688 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4689 gcc_assert (t2 != t1);
4690 }
4691 #endif
4692
4693 return op1;
4694 }
4695
4696 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4697 non-NULL, list them all into VECTOR, in a depth-first preorder
4698 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4699 blocks. */
4700
4701 static int
4702 all_blocks (tree block, tree *vector)
4703 {
4704 int n_blocks = 0;
4705
4706 while (block)
4707 {
4708 TREE_ASM_WRITTEN (block) = 0;
4709
4710 /* Record this block. */
4711 if (vector)
4712 vector[n_blocks] = block;
4713
4714 ++n_blocks;
4715
4716 /* Record the subblocks, and their subblocks... */
4717 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4718 vector ? vector + n_blocks : 0);
4719 block = BLOCK_CHAIN (block);
4720 }
4721
4722 return n_blocks;
4723 }
4724
4725 /* Return a vector containing all the blocks rooted at BLOCK. The
4726 number of elements in the vector is stored in N_BLOCKS_P. The
4727 vector is dynamically allocated; it is the caller's responsibility
4728 to call `free' on the pointer returned. */
4729
4730 static tree *
4731 get_block_vector (tree block, int *n_blocks_p)
4732 {
4733 tree *block_vector;
4734
4735 *n_blocks_p = all_blocks (block, NULL);
4736 block_vector = XNEWVEC (tree, *n_blocks_p);
4737 all_blocks (block, block_vector);
4738
4739 return block_vector;
4740 }
4741
4742 static GTY(()) int next_block_index = 2;
4743
4744 /* Set BLOCK_NUMBER for all the blocks in FN. */
4745
4746 void
4747 number_blocks (tree fn)
4748 {
4749 int i;
4750 int n_blocks;
4751 tree *block_vector;
4752
4753 /* For XCOFF debugging output, we start numbering the blocks
4754 from 1 within each function, rather than keeping a running
4755 count. */
4756 #if defined (XCOFF_DEBUGGING_INFO)
4757 if (write_symbols == XCOFF_DEBUG)
4758 next_block_index = 1;
4759 #endif
4760
4761 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4762
4763 /* The top-level BLOCK isn't numbered at all. */
4764 for (i = 1; i < n_blocks; ++i)
4765 /* We number the blocks from two. */
4766 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4767
4768 free (block_vector);
4769
4770 return;
4771 }
4772
4773 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4774
4775 DEBUG_FUNCTION tree
4776 debug_find_var_in_block_tree (tree var, tree block)
4777 {
4778 tree t;
4779
4780 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4781 if (t == var)
4782 return block;
4783
4784 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4785 {
4786 tree ret = debug_find_var_in_block_tree (var, t);
4787 if (ret)
4788 return ret;
4789 }
4790
4791 return NULL_TREE;
4792 }
4793
4794 /* Keep track of whether we're in a dummy function context. If we are,
4795 we don't want to invoke the set_current_function hook, because we'll
4796 get into trouble if the hook calls target_reinit () recursively or
4797 when the initial initialization is not yet complete. */
4798
4799 static bool in_dummy_function;
4800
4801 /* Invoke the target hook when setting cfun. Update the optimization options
4802 if the function uses different options than the default. */
4803
4804 static void
4805 invoke_set_current_function_hook (tree fndecl)
4806 {
4807 if (!in_dummy_function)
4808 {
4809 tree opts = ((fndecl)
4810 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4811 : optimization_default_node);
4812
4813 if (!opts)
4814 opts = optimization_default_node;
4815
4816 /* Change optimization options if needed. */
4817 if (optimization_current_node != opts)
4818 {
4819 optimization_current_node = opts;
4820 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4821 }
4822
4823 targetm.set_current_function (fndecl);
4824 this_fn_optabs = this_target_optabs;
4825
4826 if (opts != optimization_default_node)
4827 {
4828 init_tree_optimization_optabs (opts);
4829 if (TREE_OPTIMIZATION_OPTABS (opts))
4830 this_fn_optabs = (struct target_optabs *)
4831 TREE_OPTIMIZATION_OPTABS (opts);
4832 }
4833 }
4834 }
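
/* Illustrative trigger (user source, not part of GCC): a per-function
   optimization override such as

       __attribute__ ((optimize ("O0"))) void g (void);

   gives G an optimization node different from the default, so
   switching into G via this hook restores its options and swaps in
   its function-specific optabs.  */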
4835
4836 /* cfun should never be set directly; use this function. */
4837
4838 void
4839 set_cfun (struct function *new_cfun, bool force)
4840 {
4841 if (cfun != new_cfun || force)
4842 {
4843 cfun = new_cfun;
4844 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4845 redirect_edge_var_map_empty ();
4846 }
4847 }
4848
4849 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4850
4851 static vec<function *> cfun_stack;
4852
4853 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4854 current_function_decl accordingly. */
4855
4856 void
4857 push_cfun (struct function *new_cfun)
4858 {
4859 gcc_assert ((!cfun && !current_function_decl)
4860 || (cfun && current_function_decl == cfun->decl));
4861 cfun_stack.safe_push (cfun);
4862 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4863 set_cfun (new_cfun);
4864 }
4865
4866 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4867
4868 void
4869 pop_cfun (void)
4870 {
4871 struct function *new_cfun = cfun_stack.pop ();
4872 /* When in_dummy_function, we do have a cfun but current_function_decl is
4873 NULL. We also allow pushing NULL cfun and subsequently changing
4874 current_function_decl to something else and have both restored by
4875 pop_cfun. */
4876 gcc_checking_assert (in_dummy_function
4877 || !cfun
4878 || current_function_decl == cfun->decl);
4879 set_cfun (new_cfun);
4880 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4881 }
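
/* Usage sketch (OTHER_FNDECL is a placeholder): code that needs to
   work on a function other than the current one brackets the work
   with a push/pop pair:

       push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
       // ... inspect or modify that function ...
       pop_cfun ();

   Calls must nest properly, since cfun_stack is a plain stack.  */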
4882
4883 /* Return the current value of funcdef_no and increment it. */
4884 int
4885 get_next_funcdef_no (void)
4886 {
4887 return funcdef_no++;
4888 }
4889
4890 /* Return the current value of funcdef_no. */
4891 int
4892 get_last_funcdef_no (void)
4893 {
4894 return funcdef_no;
4895 }
4896
4897 /* Allocate a function structure for FNDECL and set its contents
4898 to the defaults. Set cfun to the newly-allocated object.
4899 Some of the helper functions invoked during initialization assume
4900 that cfun has already been set. Therefore, assign the new object
4901 directly into cfun and invoke the back end hook explicitly at the
4902 very end, rather than initializing a temporary and calling set_cfun
4903 on it.
4904
4905 ABSTRACT_P is true if this is a function that will never be seen by
4906 the middle-end. Such functions are front-end concepts (like C++
4907 function templates) that do not correspond directly to functions
4908 placed in object files. */
4909
4910 void
4911 allocate_struct_function (tree fndecl, bool abstract_p)
4912 {
4913 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4914
4915 cfun = ggc_cleared_alloc<function> ();
4916
4917 init_eh_for_function ();
4918
4919 if (init_machine_status)
4920 cfun->machine = (*init_machine_status) ();
4921
4922 #ifdef OVERRIDE_ABI_FORMAT
4923 OVERRIDE_ABI_FORMAT (fndecl);
4924 #endif
4925
4926 if (fndecl != NULL_TREE)
4927 {
4928 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4929 cfun->decl = fndecl;
4930 current_function_funcdef_no = get_next_funcdef_no ();
4931 }
4932
4933 invoke_set_current_function_hook (fndecl);
4934
4935 if (fndecl != NULL_TREE)
4936 {
4937 tree result = DECL_RESULT (fndecl);
4938
4939 if (!abstract_p)
4940 {
4941 /* Now that we have activated any function-specific attributes
4942 that might affect layout, particularly vector modes, relayout
4943 each of the parameters and the result. */
4944 relayout_decl (result);
4945 for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4946 parm = DECL_CHAIN (parm))
4947 relayout_decl (parm);
4948
4949 /* Similarly relayout the function decl. */
4950 targetm.target_option.relayout_function (fndecl);
4951 }
4952
4953 if (!abstract_p && aggregate_value_p (result, fndecl))
4954 {
4955 #ifdef PCC_STATIC_STRUCT_RETURN
4956 cfun->returns_pcc_struct = 1;
4957 #endif
4958 cfun->returns_struct = 1;
4959 }
4960
4961 cfun->stdarg = stdarg_p (fntype);
4962
4963 /* Assume all registers in stdarg functions need to be saved. */
4964 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4965 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4966
4967 /* ??? This could be set on a per-function basis by the front-end
4968 but is this worth the hassle? */
4969 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4970 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4971
4972 if (!profile_flag && !flag_instrument_function_entry_exit)
4973 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4974 }
4975
4976 /* Don't enable begin stmt markers if var-tracking at assignments is
4977 disabled. The markers make little sense without the variable
4978 binding annotations among them. */
4979 cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4980 && MAY_HAVE_DEBUG_MARKER_STMTS;
4981 }
4982
4983 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4984 instead of just setting it. */
4985
4986 void
4987 push_struct_function (tree fndecl)
4988 {
4989 /* When in_dummy_function we might be in the middle of a pop_cfun and
4990 current_function_decl and cfun may not match. */
4991 gcc_assert (in_dummy_function
4992 || (!cfun && !current_function_decl)
4993 || (cfun && current_function_decl == cfun->decl));
4994 cfun_stack.safe_push (cfun);
4995 current_function_decl = fndecl;
4996 allocate_struct_function (fndecl, false);
4997 }
4998
4999 /* Reset crtl and other non-struct-function variables to defaults as
5000 appropriate for emitting rtl at the start of a function. */
5001
5002 static void
5003 prepare_function_start (void)
5004 {
5005 gcc_assert (!get_last_insn ());
5006 init_temp_slots ();
5007 init_emit ();
5008 init_varasm_status ();
5009 init_expr ();
5010 default_rtl_profile ();
5011
5012 if (flag_stack_usage_info)
5013 {
5014 cfun->su = ggc_cleared_alloc<stack_usage> ();
5015 cfun->su->static_stack_size = -1;
5016 }
5017
5018 cse_not_expected = ! optimize;
5019
5020 /* Caller save not needed yet. */
5021 caller_save_needed = 0;
5022
5023 /* We haven't done register allocation yet. */
5024 reg_renumber = 0;
5025
5026 /* Indicate that we have not instantiated virtual registers yet. */
5027 virtuals_instantiated = 0;
5028
5029 /* Indicate that we want CONCATs now. */
5030 generating_concat_p = 1;
5031
5032 /* Indicate we have no need of a frame pointer yet. */
5033 frame_pointer_needed = 0;
5034 }
5035
5036 void
5037 push_dummy_function (bool with_decl)
5038 {
5039 tree fn_decl, fn_type, fn_result_decl;
5040
5041 gcc_assert (!in_dummy_function);
5042 in_dummy_function = true;
5043
5044 if (with_decl)
5045 {
5046 fn_type = build_function_type_list (void_type_node, NULL_TREE);
5047 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
5048 fn_type);
5049 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
5050 NULL_TREE, void_type_node);
5051 DECL_RESULT (fn_decl) = fn_result_decl;
5052 }
5053 else
5054 fn_decl = NULL_TREE;
5055
5056 push_struct_function (fn_decl);
5057 }
5058
5059 /* Initialize the rtl expansion mechanism so that we can do simple things
5060 like generate sequences. This is used to provide a context during global
5061 initialization of some passes. You must call expand_dummy_function_end
5062 to exit this context. */
5063
5064 void
5065 init_dummy_function_start (void)
5066 {
5067 push_dummy_function (false);
5068 prepare_function_start ();
5069 }
5070
5071 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5072 and initialize static variables for generating RTL for the statements
5073 of the function. */
5074
5075 void
5076 init_function_start (tree subr)
5077 {
5078 /* Initialize backend, if needed. */
5079 initialize_rtl ();
5080
5081 prepare_function_start ();
5082 decide_function_section (subr);
5083
5084 /* Warn if this value is an aggregate type,
5085 regardless of which calling convention we are using for it. */
5086 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5087 warning (OPT_Waggregate_return, "function returns an aggregate");
5088 }
5089
5090 /* Expand code to verify the stack_protect_guard. This is invoked at
5091 the end of a function to be protected. */
5092
5093 void
5094 stack_protect_epilogue (void)
5095 {
5096 tree guard_decl = targetm.stack_protect_guard ();
5097 rtx_code_label *label = gen_label_rtx ();
5098 rtx x, y;
5099 rtx_insn *seq;
5100
5101 x = expand_normal (crtl->stack_protect_guard);
5102 if (guard_decl)
5103 y = expand_normal (guard_decl);
5104 else
5105 y = const0_rtx;
5106
5107 /* Allow the target to compare Y with X without leaking either into
5108 a register. */
5109 if (targetm.have_stack_protect_test ()
5110 && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
5111 emit_insn (seq);
5112 else
5113 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5114
5115 /* The noreturn predictor has been moved to the tree level. The rtl-level
5116 predictors estimate this branch at about 20%, which isn't enough to get
5117 things moved out of line. Since this is the only extant case of adding
5118 a noreturn function at the rtl level, it doesn't seem worth doing anything
5119 except adding the prediction by hand. */
5120 rtx_insn *tmp = get_last_insn ();
5121 if (JUMP_P (tmp))
5122 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5123
5124 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5125 free_temp_slots ();
5126 emit_label (label);
5127 }
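/* For illustration, the sequence emitted above behaves roughly like the
   following C sketch (names are illustrative; the guard symbol and the
   failure routine, commonly __stack_chk_fail, are target-defined):

     if (frame_guard != global_guard)   // X vs. Y above
       stack_protect_fail ();           // noreturn, predicted not taken
     done: ;                            // LABEL above
*/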
5128
5129 /* Start the RTL for a new function, and set variables used for
5130 emitting RTL.
5131 SUBR is the FUNCTION_DECL node for the function. */
5134
5135 void
5136 expand_function_start (tree subr)
5137 {
5138 /* Make sure volatile mem refs aren't considered
5139 valid operands of arithmetic insns. */
5140 init_recog_no_volatile ();
5141
5142 crtl->profile
5143 = (profile_flag
5144 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5145
5146 crtl->limit_stack
5147 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5148
5149 /* Make the label for return statements to jump to. Do not special
5150 case machines with special return instructions -- they will be
5151 handled later during jump, ifcvt, or epilogue creation. */
5152 return_label = gen_label_rtx ();
5153
5154 /* Initialize rtx used to return the value. */
5155 /* Do this before assign_parms so that we copy the struct value address
5156 before any library calls that assign parms might generate. */
5157
5158 /* Decide whether to return the value in memory or in a register. */
5159 tree res = DECL_RESULT (subr);
5160 if (aggregate_value_p (res, subr))
5161 {
5162 /* Returning something that won't go in a register. */
5163 rtx value_address = 0;
5164
5165 #ifdef PCC_STATIC_STRUCT_RETURN
5166 if (cfun->returns_pcc_struct)
5167 {
5168 int size = int_size_in_bytes (TREE_TYPE (res));
5169 value_address = assemble_static_space (size);
5170 }
5171 else
5172 #endif
5173 {
5174 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5175 /* Expect to be passed the address of a place to store the value.
5176 If it is passed as an argument, assign_parms will take care of
5177 it. */
5178 if (sv)
5179 {
5180 value_address = gen_reg_rtx (Pmode);
5181 emit_move_insn (value_address, sv);
5182 }
5183 }
5184 if (value_address)
5185 {
5186 rtx x = value_address;
5187 if (!DECL_BY_REFERENCE (res))
5188 {
5189 x = gen_rtx_MEM (DECL_MODE (res), x);
5190 set_mem_attributes (x, res, 1);
5191 }
5192 set_parm_rtl (res, x);
5193 }
5194 }
5195 else if (DECL_MODE (res) == VOIDmode)
5196 /* If return mode is void, this decl rtl should not be used. */
5197 set_parm_rtl (res, NULL_RTX);
5198 else
5199 {
5200 /* Compute the return values into a pseudo reg, which we will copy
5201 into the true return register after the cleanups are done. */
5202 tree return_type = TREE_TYPE (res);
5203
5204 /* If we may coalesce this result, make sure it has the expected mode
5205 in case it was promoted. But we need not bother about BLKmode. */
5206 machine_mode promoted_mode
5207 = flag_tree_coalesce_vars && is_gimple_reg (res)
5208 ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5209 : BLKmode;
5210
5211 if (promoted_mode != BLKmode)
5212 set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5213 else if (TYPE_MODE (return_type) != BLKmode
5214 && targetm.calls.return_in_msb (return_type))
5215 /* expand_function_end will insert the appropriate padding in
5216 this case. Use the return value's natural (unpadded) mode
5217 within the function proper. */
5218 set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5219 else
5220 {
5221 /* In order to figure out what mode to use for the pseudo, we
5222 figure out what the mode of the eventual return register will
5223 actually be, and use that. */
5224 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5225
5226 /* Structures that are returned in registers are not
5227 aggregate_value_p, so we may see a PARALLEL or a REG. */
5228 if (REG_P (hard_reg))
5229 set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5230 else
5231 {
5232 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5233 set_parm_rtl (res, gen_group_rtx (hard_reg));
5234 }
5235 }
5236
5237 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5238 result to the real return register(s). */
5239 DECL_REGISTER (res) = 1;
5240
5241 if (chkp_function_instrumented_p (current_function_decl))
5242 {
5243 tree return_type = TREE_TYPE (res);
5244 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5245 subr, 1);
5246 SET_DECL_BOUNDS_RTL (res, bounds);
5247 }
5248 }
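/* For illustration, a function such as

     short h (void) { return -1; }

   may get an SImode rather than HImode pseudo above on targets that
   promote return values; expand_function_end later re-applies the
   PROMOTE_MODE extension when copying the pseudo to the hard return
   register. */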
5249
5250 /* Initialize rtx for parameters and local variables.
5251 In some cases this requires emitting insns. */
5252 assign_parms (subr);
5253
5254 /* If function gets a static chain arg, store it. */
5255 if (cfun->static_chain_decl)
5256 {
5257 tree parm = cfun->static_chain_decl;
5258 rtx local, chain;
5259 rtx_insn *insn;
5260 int unsignedp;
5261
5262 local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5263 chain = targetm.calls.static_chain (current_function_decl, true);
5264
5265 set_decl_incoming_rtl (parm, chain, false);
5266 set_parm_rtl (parm, local);
5267 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5268
5269 if (GET_MODE (local) != GET_MODE (chain))
5270 {
5271 convert_move (local, chain, unsignedp);
5272 insn = get_last_insn ();
5273 }
5274 else
5275 insn = emit_move_insn (local, chain);
5276
5277 /* Mark the register as eliminable, similar to parameters. */
5278 if (MEM_P (chain)
5279 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5280 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5281
5282 /* If we aren't optimizing, save the static chain onto the stack. */
5283 if (!optimize)
5284 {
5285 tree saved_static_chain_decl
5286 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5287 DECL_NAME (parm), TREE_TYPE (parm));
5288 rtx saved_static_chain_rtx
5289 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5290 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5291 emit_move_insn (saved_static_chain_rtx, chain);
5292 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5293 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5294 }
5295 }
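/* For illustration, a GNU C nested function that refers to its
   enclosing frame, e.g.

     int outer (int x)
     {
       int inner (void) { return x; }
       return inner ();
     }

   gives INNER a static chain argument through which it reaches X; the
   code above copies the incoming chain into a pseudo and, at -O0, also
   spills it to a stack slot so that debuggers can find it. */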
5296
5297 /* The following was moved from init_function_start.
5298 The move was supposed to make sdb output more accurate. */
5299 /* Indicate the beginning of the function body,
5300 as opposed to parm setup. */
5301 emit_note (NOTE_INSN_FUNCTION_BEG);
5302
5303 gcc_assert (NOTE_P (get_last_insn ()));
5304
5305 parm_birth_insn = get_last_insn ();
5306
5307 /* If the function receives a non-local goto, then store the
5308 bits we need to restore the frame pointer. */
5309 if (cfun->nonlocal_goto_save_area)
5310 {
5311 tree t_save;
5312 rtx r_save;
5313
5314 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5315 gcc_assert (DECL_RTL_SET_P (var));
5316
5317 t_save = build4 (ARRAY_REF,
5318 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5319 cfun->nonlocal_goto_save_area,
5320 integer_zero_node, NULL_TREE, NULL_TREE);
5321 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5322 gcc_assert (GET_MODE (r_save) == Pmode);
5323
5324 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5325 update_nonlocal_goto_save_area ();
5326 }
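/* For illustration, a non-local goto in GNU C:

     void outer (void)
     {
       __label__ out;
       void inner (void) { goto out; }
       call_me (inner);   // call_me is a hypothetical callee
     out:;
     }

   makes OUTER save its frame and stack pointers in the area initialized
   above, so the jump out of INNER can restore them. */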
5327
5328 if (crtl->profile)
5329 {
5330 #ifdef PROFILE_HOOK
5331 PROFILE_HOOK (current_function_funcdef_no);
5332 #endif
5333 }
5334
5335 /* If we are doing generic stack checking, the probe should go here. */
5336 if (flag_stack_check == GENERIC_STACK_CHECK)
5337 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5338 }
5339
5340 void
5341 pop_dummy_function (void)
5342 {
5343 pop_cfun ();
5344 in_dummy_function = false;
5345 }
5346
5347 /* Undo the effects of init_dummy_function_start. */
5348 void
5349 expand_dummy_function_end (void)
5350 {
5351 gcc_assert (in_dummy_function);
5352
5353 /* End any sequences that failed to be closed due to syntax errors. */
5354 while (in_sequence_p ())
5355 end_sequence ();
5356
5357 /* Outside a function body, we can't compute a type's actual size
5358 until the next function's body starts. */
5359
5360 free_after_parsing (cfun);
5361 free_after_compilation (cfun);
5362 pop_dummy_function ();
5363 }
5364
5365 /* Helper for diddle_return_value. */
5366
5367 void
5368 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5369 {
5370 if (! outgoing)
5371 return;
5372
5373 if (REG_P (outgoing))
5374 (*doit) (outgoing, arg);
5375 else if (GET_CODE (outgoing) == PARALLEL)
5376 {
5377 int i;
5378
5379 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5380 {
5381 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5382
5383 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5384 (*doit) (x, arg);
5385 }
5386 }
5387 }
5388
5389 /* Call DOIT for each hard register used as a return value from
5390 the current function. */
5391
5392 void
5393 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5394 {
5395 diddle_return_value_1 (doit, arg, crtl->return_bnd);
5396 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5397 }
5398
5399 static void
5400 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5401 {
5402 emit_clobber (reg);
5403 }
5404
5405 void
5406 clobber_return_register (void)
5407 {
5408 diddle_return_value (do_clobber_return_reg, NULL);
5409
5410 /* In case we do use a pseudo to return the value, clobber it too. */
5411 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5412 {
5413 tree decl_result = DECL_RESULT (current_function_decl);
5414 rtx decl_rtl = DECL_RTL (decl_result);
5415 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5416 {
5417 do_clobber_return_reg (decl_rtl, NULL);
5418 }
5419 }
5420 }
5421
5422 static void
5423 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5424 {
5425 emit_use (reg);
5426 }
5427
5428 static void
5429 use_return_register (void)
5430 {
5431 diddle_return_value (do_use_return_reg, NULL);
5432 }
5433
5434 /* Set the location of the insn chain starting at INSN to LOC. */
5435
5436 static void
5437 set_insn_locations (rtx_insn *insn, int loc)
5438 {
5439 while (insn != NULL)
5440 {
5441 if (INSN_P (insn))
5442 INSN_LOCATION (insn) = loc;
5443 insn = NEXT_INSN (insn);
5444 }
5445 }
5446
5447 /* Generate RTL for the end of the current function. */
5448
5449 void
5450 expand_function_end (void)
5451 {
5452 /* If arg_pointer_save_area was referenced only from a nested
5453 function, we will not have initialized it yet. Do that now. */
5454 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5455 get_arg_pointer_save_area ();
5456
5457 /* If we are doing generic stack checking and this function makes calls,
5458 do a stack probe at the start of the function to ensure we have enough
5459 space for another stack frame. */
5460 if (flag_stack_check == GENERIC_STACK_CHECK)
5461 {
5462 rtx_insn *insn, *seq;
5463
5464 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5465 if (CALL_P (insn))
5466 {
5467 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5468 start_sequence ();
5469 if (STACK_CHECK_MOVING_SP)
5470 anti_adjust_stack_and_probe (max_frame_size, true);
5471 else
5472 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5473 seq = get_insns ();
5474 end_sequence ();
5475 set_insn_locations (seq, prologue_location);
5476 emit_insn_before (seq, stack_check_probe_note);
5477 break;
5478 }
5479 }
5480
5481 /* End any sequences that failed to be closed due to syntax errors. */
5482 while (in_sequence_p ())
5483 end_sequence ();
5484
5485 clear_pending_stack_adjust ();
5486 do_pending_stack_adjust ();
5487
5488 /* Output a line number for the end of the function.
5489 SDB depended on this. */
5490 set_curr_insn_location (input_location);
5491
5492 /* Before the return label (if any), clobber the return
5493 registers so that they are not propagated live to the rest of
5494 the function. This can only happen with functions that drop
5495 through; if there had been a return statement, there would
5496 have either been a return rtx, or a jump to the return label.
5497
5498 We delay actual code generation until after the current_function_value_rtx
5499 is computed. */
5500 rtx_insn *clobber_after = get_last_insn ();
5501
5502 /* Output the label for the actual return from the function. */
5503 emit_label (return_label);
5504
5505 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5506 {
5507 /* Let except.c know where it should emit the call to unregister
5508 the function context for sjlj exceptions. */
5509 if (flag_exceptions)
5510 sjlj_emit_function_exit_after (get_last_insn ());
5511 }
5512 else
5513 {
5514 /* We want to ensure that instructions that may trap are not
5515 moved into the epilogue by scheduling, because we don't
5516 always emit unwind information for the epilogue. */
5517 if (cfun->can_throw_non_call_exceptions)
5518 emit_insn (gen_blockage ());
5519 }
5520
5521 /* If this is an implementation of throw, do what's necessary to
5522 communicate between __builtin_eh_return and the epilogue. */
5523 expand_eh_return ();
5524
5525 /* If scalar return value was computed in a pseudo-reg, or was a named
5526 return value that got dumped to the stack, copy that to the hard
5527 return register. */
5528 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5529 {
5530 tree decl_result = DECL_RESULT (current_function_decl);
5531 rtx decl_rtl = DECL_RTL (decl_result);
5532
5533 if (REG_P (decl_rtl)
5534 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5535 : DECL_REGISTER (decl_result))
5536 {
5537 rtx real_decl_rtl = crtl->return_rtx;
5538 complex_mode cmode;
5539
5540 /* This should be set in assign_parms. */
5541 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5542
5543 /* If this is a BLKmode structure being returned in registers,
5544 then use the mode computed in expand_return. Note that if
5545 decl_rtl is memory, then its mode may have been changed,
5546 but that of crtl->return_rtx has not. */
5547 if (GET_MODE (real_decl_rtl) == BLKmode)
5548 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5549
5550 /* If a non-BLKmode return value should be padded at the least
5551 significant end of the register, shift it left by the appropriate
5552 amount. BLKmode results are handled using the group load/store
5553 machinery. */
5554 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5555 && REG_P (real_decl_rtl)
5556 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5557 {
5558 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5559 REGNO (real_decl_rtl)),
5560 decl_rtl);
5561 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5562 }
5563 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5564 {
5565 /* If expand_function_start has created a PARALLEL for decl_rtl,
5566 move the result to the real return registers. Otherwise, do
5567 a group load from decl_rtl for a named return. */
5568 if (GET_CODE (decl_rtl) == PARALLEL)
5569 emit_group_move (real_decl_rtl, decl_rtl);
5570 else
5571 emit_group_load (real_decl_rtl, decl_rtl,
5572 TREE_TYPE (decl_result),
5573 int_size_in_bytes (TREE_TYPE (decl_result)));
5574 }
5575 /* In the case of complex integer modes smaller than a word, we'll
5576 need to generate some non-trivial bitfield insertions. Do that
5577 on a pseudo and not the hard register. */
5578 else if (GET_CODE (decl_rtl) == CONCAT
5579 && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5580 && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5581 {
5582 int old_generating_concat_p;
5583 rtx tmp;
5584
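/* For illustration, this path is taken e.g. for a function returning
   _Complex short: decl_rtl is a CONCAT of two sub-word halves, and
   moving it via a pseudo lets the move expander emit the bitfield
   insertions instead of writing the hard register piecewise. */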
5585 old_generating_concat_p = generating_concat_p;
5586 generating_concat_p = 0;
5587 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5588 generating_concat_p = old_generating_concat_p;
5589
5590 emit_move_insn (tmp, decl_rtl);
5591 emit_move_insn (real_decl_rtl, tmp);
5592 }
5593 /* If a named return value was dumped to memory, then we may need
5594 to re-do the PROMOTE_MODE signed/unsigned extension. */
5596 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5597 {
5598 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5599 promote_function_mode (TREE_TYPE (decl_result),
5600 GET_MODE (decl_rtl), &unsignedp,
5601 TREE_TYPE (current_function_decl), 1);
5602
5603 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5604 }
5605 else
5606 emit_move_insn (real_decl_rtl, decl_rtl);
5607 }
5608 }
5609
5610 /* If returning a structure, arrange to return the address of the value
5611 in a place where debuggers expect to find it.
5612
5613 If returning a structure PCC style,
5614 the caller also depends on this value.
5615 And cfun->returns_pcc_struct is not necessarily set. */
5616 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5617 && !targetm.calls.omit_struct_return_reg)
5618 {
5619 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5620 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5621 rtx outgoing;
5622
5623 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5624 type = TREE_TYPE (type);
5625 else
5626 value_address = XEXP (value_address, 0);
5627
5628 outgoing = targetm.calls.function_value (build_pointer_type (type),
5629 current_function_decl, true);
5630
5631 /* Mark this as a function return value so integrate will delete the
5632 assignment and USE below when inlining this function. */
5633 REG_FUNCTION_VALUE_P (outgoing) = 1;
5634
5635 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5636 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5637 value_address = convert_memory_address (mode, value_address);
5638
5639 emit_move_insn (outgoing, value_address);
5640
5641 /* Show return register used to hold result (in this case the address
5642 of the result). */
5643 crtl->return_rtx = outgoing;
5644 }
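/* For illustration, for an aggregate-returning function such as

     struct big { int a[8]; };
     struct big make_big (void);

   the caller passes a hidden pointer to the return slot, and the code
   above copies that address into the function-value register on exit so
   that debuggers (and PCC-style callers) can find the result (big and
   make_big are purely illustrative names). */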
5645
5646 /* Emit the actual code to clobber return register. Don't emit
5647 it if clobber_after is a barrier, then the previous basic block
5648 certainly doesn't fall thru into the exit block. */
5649 if (!BARRIER_P (clobber_after))
5650 {
5651 start_sequence ();
5652 clobber_return_register ();
5653 rtx_insn *seq = get_insns ();
5654 end_sequence ();
5655
5656 emit_insn_after (seq, clobber_after);
5657 }
5658
5659 /* Output the label for the naked return from the function. */
5660 if (naked_return_label)
5661 emit_label (naked_return_label);
5662
5663 /* @@@ This is a kludge. We want to ensure that instructions that
5664 may trap are not moved into the epilogue by scheduling, because
5665 we don't always emit unwind information for the epilogue. */
5666 if (cfun->can_throw_non_call_exceptions
5667 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5668 emit_insn (gen_blockage ());
5669
5670 /* If stack protection is enabled for this function, check the guard. */
5671 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
5672 stack_protect_epilogue ();
5673
5674 /* If we had calls to alloca, and this machine needs
5675 an accurate stack pointer to exit the function,
5676 insert some code to save and restore the stack pointer. */
5677 if (! EXIT_IGNORE_STACK
5678 && cfun->calls_alloca)
5679 {
5680 rtx tem = 0;
5681
5682 start_sequence ();
5683 emit_stack_save (SAVE_FUNCTION, &tem);
5684 rtx_insn *seq = get_insns ();
5685 end_sequence ();
5686 emit_insn_before (seq, parm_birth_insn);
5687
5688 emit_stack_restore (SAVE_FUNCTION, tem);
5689 }
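/* For illustration, in

     void g (int n) { char *p = __builtin_alloca (n); use_it (p); }

   (use_it being a hypothetical consumer) the stack pointer moves at run
   time, so on targets where the exit code needs an accurate stack
   pointer the save is emitted right after parameter setup and the
   restore just above, before the return. */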
5690
5691 /* ??? This should no longer be necessary since the old "stupid"
5692 register allocator is no longer with us, but some parts of the compiler
5693 (e.g. reload_combine, and sh mach_dep_reorg) still try to compute their
5694 own lifetime info instead of using the general framework. */
5695 use_return_register ();
5696 }
5697
5698 rtx
5699 get_arg_pointer_save_area (void)
5700 {
5701 rtx ret = arg_pointer_save_area;
5702
5703 if (! ret)
5704 {
5705 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5706 arg_pointer_save_area = ret;
5707 }
5708
5709 if (! crtl->arg_pointer_save_area_init)
5710 {
5711 /* Save the arg pointer at the beginning of the function. The
5712 generated stack slot may not be a valid memory address, so we
5713 have to check it and fix it if necessary. */
5714 start_sequence ();
5715 emit_move_insn (validize_mem (copy_rtx (ret)),
5716 crtl->args.internal_arg_pointer);
5717 rtx_insn *seq = get_insns ();
5718 end_sequence ();
5719
5720 push_topmost_sequence ();
5721 emit_insn_after (seq, entry_of_function ());
5722 pop_topmost_sequence ();
5723
5724 crtl->arg_pointer_save_area_init = true;
5725 }
5726
5727 return ret;
5728 }
5729
5730
5731 /* If debugging dumps are requested, dump information about how the
5732 target handled -fstack-check=clash for the prologue.
5733
5734 PROBES describes what if any probes were emitted.
5735
5736 RESIDUALS indicates if the prologue had any residual allocation
5737 (i.e. total allocation was not a multiple of PROBE_INTERVAL). */
5738
5739 void
5740 dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5741 {
5742 if (!dump_file)
5743 return;
5744
5745 switch (probes)
5746 {
5747 case NO_PROBE_NO_FRAME:
5748 fprintf (dump_file,
5749 "Stack clash no probe no stack adjustment in prologue.\n");
5750 break;
5751 case NO_PROBE_SMALL_FRAME:
5752 fprintf (dump_file,
5753 "Stack clash no probe small stack adjustment in prologue.\n");
5754 break;
5755 case PROBE_INLINE:
5756 fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5757 break;
5758 case PROBE_LOOP:
5759 fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5760 break;
5761 }
5762
5763 if (residuals)
5764 fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5765 else
5766 fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5767
5768 if (frame_pointer_needed)
5769 fprintf (dump_file, "Stack clash frame pointer needed.\n");
5770 else
5771 fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5772
5773 if (TREE_THIS_VOLATILE (cfun->decl))
5774 fprintf (dump_file,
5775 "Stack clash noreturn prologue, assuming no implicit"
5776 " probes in caller.\n");
5777 else
5778 fprintf (dump_file,
5779 "Stack clash not noreturn prologue.\n");
5780 }
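/* For example, a prologue that emitted inline probes, had a residual
   allocation, and needed a frame pointer would produce in the dump file:

     Stack clash inline probes in prologue.
     Stack clash residual allocation in prologue.
     Stack clash frame pointer needed.
     Stack clash not noreturn prologue.  */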
5781
5782 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5783 for the first time. */
5784
5785 static void
5786 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5787 {
5788 rtx_insn *tmp;
5789 hash_table<insn_cache_hasher> *hash = *hashp;
5790
5791 if (hash == NULL)
5792 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5793
5794 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5795 {
5796 rtx *slot = hash->find_slot (tmp, INSERT);
5797 gcc_assert (*slot == NULL);
5798 *slot = tmp;
5799 }
5800 }
5801
5802 /* INSN has been duplicated as, or replaced by, COPY, perhaps by
5803 duplicating a basic block, splitting, or running peepholes. If INSN
5804 is a prologue or epilogue insn, then record COPY as well. */
5805
5806 void
5807 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5808 {
5809 hash_table<insn_cache_hasher> *hash;
5810 rtx *slot;
5811
5812 hash = epilogue_insn_hash;
5813 if (!hash || !hash->find (insn))
5814 {
5815 hash = prologue_insn_hash;
5816 if (!hash || !hash->find (insn))
5817 return;
5818 }
5819
5820 slot = hash->find_slot (copy, INSERT);
5821 gcc_assert (*slot == NULL);
5822 *slot = copy;
5823 }
5824
5825 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5826 we can be running after reorg, SEQUENCE rtl is possible. */
5827
5828 static bool
5829 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5830 {
5831 if (hash == NULL)
5832 return false;
5833
5834 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5835 {
5836 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5837 int i;
5838 for (i = seq->len () - 1; i >= 0; i--)
5839 if (hash->find (seq->element (i)))
5840 return true;
5841 return false;
5842 }
5843
5844 return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5845 }
5846
5847 int
5848 prologue_contains (const rtx_insn *insn)
5849 {
5850 return contains (insn, prologue_insn_hash);
5851 }
5852
5853 int
5854 epilogue_contains (const rtx_insn *insn)
5855 {
5856 return contains (insn, epilogue_insn_hash);
5857 }
5858
5859 int
5860 prologue_epilogue_contains (const rtx_insn *insn)
5861 {
5862 if (contains (insn, prologue_insn_hash))
5863 return 1;
5864 if (contains (insn, epilogue_insn_hash))
5865 return 1;
5866 return 0;
5867 }
5868
5869 void
5870 record_prologue_seq (rtx_insn *seq)
5871 {
5872 record_insns (seq, NULL, &prologue_insn_hash);
5873 }
5874
5875 void
5876 record_epilogue_seq (rtx_insn *seq)
5877 {
5878 record_insns (seq, NULL, &epilogue_insn_hash);
5879 }
5880
5881 /* Set JUMP_LABEL for a return insn. */
5882
5883 void
5884 set_return_jump_label (rtx_insn *returnjump)
5885 {
5886 rtx pat = PATTERN (returnjump);
5887 if (GET_CODE (pat) == PARALLEL)
5888 pat = XVECEXP (pat, 0, 0);
5889 if (ANY_RETURN_P (pat))
5890 JUMP_LABEL (returnjump) = pat;
5891 else
5892 JUMP_LABEL (returnjump) = ret_rtx;
5893 }
5894
5895 /* Return a sequence to be used as the split prologue for the current
5896 function, or NULL. */
5897
5898 static rtx_insn *
5899 make_split_prologue_seq (void)
5900 {
5901 if (!flag_split_stack
5902 || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5903 return NULL;
5904
5905 start_sequence ();
5906 emit_insn (targetm.gen_split_stack_prologue ());
5907 rtx_insn *seq = get_insns ();
5908 end_sequence ();
5909
5910 record_insns (seq, NULL, &prologue_insn_hash);
5911 set_insn_locations (seq, prologue_location);
5912
5913 return seq;
5914 }
5915
5916 /* Return a sequence to be used as the prologue for the current function,
5917 or NULL. */
5918
5919 static rtx_insn *
5920 make_prologue_seq (void)
5921 {
5922 if (!targetm.have_prologue ())
5923 return NULL;
5924
5925 start_sequence ();
5926 rtx_insn *seq = targetm.gen_prologue ();
5927 emit_insn (seq);
5928
5929 /* Insert an explicit USE for the frame pointer
5930 if profiling is on and the frame pointer is required. */
5931 if (crtl->profile && frame_pointer_needed)
5932 emit_use (hard_frame_pointer_rtx);
5933
5934 /* Retain a map of the prologue insns. */
5935 record_insns (seq, NULL, &prologue_insn_hash);
5936 emit_note (NOTE_INSN_PROLOGUE_END);
5937
5938 /* Ensure that instructions are not moved into the prologue when
5939 profiling is on. The call to the profiling routine can be
5940 emitted within the live range of a call-clobbered register. */
5941 if (!targetm.profile_before_prologue () && crtl->profile)
5942 emit_insn (gen_blockage ());
5943
5944 seq = get_insns ();
5945 end_sequence ();
5946 set_insn_locations (seq, prologue_location);
5947
5948 return seq;
5949 }
5950
5951 /* Return a sequence to be used as the epilogue for the current function,
5952 or NULL. */
5953
5954 static rtx_insn *
5955 make_epilogue_seq (void)
5956 {
5957 if (!targetm.have_epilogue ())
5958 return NULL;
5959
5960 start_sequence ();
5961 emit_note (NOTE_INSN_EPILOGUE_BEG);
5962 rtx_insn *seq = targetm.gen_epilogue ();
5963 if (seq)
5964 emit_jump_insn (seq);
5965
5966 /* Retain a map of the epilogue insns. */
5967 record_insns (seq, NULL, &epilogue_insn_hash);
5968 set_insn_locations (seq, epilogue_location);
5969
5970 seq = get_insns ();
5971 rtx_insn *returnjump = get_last_insn ();
5972 end_sequence ();
5973
5974 if (JUMP_P (returnjump))
5975 set_return_jump_label (returnjump);
5976
5977 return seq;
5978 }
5979
5980
5981 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5982 this into place with notes indicating where the prologue ends and where
5983 the epilogue begins. Update the basic block information when possible.
5984
5985 Notes on epilogue placement:
5986 There are several kinds of edges to the exit block:
5987 * a single fallthru edge from LAST_BB
5988 * possibly, edges from blocks containing sibcalls
5989 * possibly, fake edges from infinite loops
5990
5991 The epilogue is always emitted on the fallthru edge from the last basic
5992 block in the function, LAST_BB, into the exit block.
5993
5994 If LAST_BB is empty except for a label, it is the target of every
5995 other basic block in the function that ends in a return. If a
5996 target has a return or simple_return pattern (possibly with
5997 conditional variants), these basic blocks can be changed so that a
5998 return insn is emitted into them, and their target is adjusted to
5999 the real exit block.
6000
6001 Notes on shrink wrapping: We implement a fairly conservative
6002 version of shrink-wrapping rather than the textbook one. We only
6003 generate a single prologue and a single epilogue. This is
6004 sufficient to catch a number of interesting cases involving early
6005 exits.
6006
6007 First, we identify the blocks that require the prologue to occur before
6008 them. These are the ones that modify a call-saved register, or reference
6009 any of the stack or frame pointer registers. To simplify things, we then
6010 mark everything reachable from these blocks as also requiring a prologue.
6011 This takes care of loops automatically, and avoids the need to examine
6012 whether MEMs reference the frame, since it is sufficient to check for
6013 occurrences of the stack or frame pointer.
6014
6015 We then compute the set of blocks for which the need for a prologue
6016 is anticipatable (borrowing terminology from the shrink-wrapping
6017 description in Muchnick's book). These are the blocks which either
6018 require a prologue themselves, or those that have only successors
6019 where the prologue is anticipatable. The prologue needs to be
6020 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
6021 is not. For the moment, we ensure that only one such edge exists.
6022
6023 The epilogue is placed as described above, but we make a
6024 distinction between inserting return and simple_return patterns
6025 when modifying other blocks that end in a return. Blocks that end
6026 in a sibcall omit the sibcall_epilogue if the block is not in
6027 ANTIC. */
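/* For illustration, shrink-wrapping pays off for functions with a cheap
   early exit, e.g.

     void f (struct s *p)
     {
       if (!p)
         return;         // fast path: no frame needed
       heavy_work (p);   // hypothetical; only this path needs the prologue
     }

   where the prologue can be sunk to the block calling heavy_work, so the
   early return executes no frame setup at all. */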
6028
6029 void
6030 thread_prologue_and_epilogue_insns (void)
6031 {
6032 df_analyze ();
6033
6034 /* Can't deal with multiple successors of the entry block at the
6035 moment. Function should always have at least one entry
6036 point. */
6037 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6038
6039 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6040 edge orig_entry_edge = entry_edge;
6041
6042 rtx_insn *split_prologue_seq = make_split_prologue_seq ();
6043 rtx_insn *prologue_seq = make_prologue_seq ();
6044 rtx_insn *epilogue_seq = make_epilogue_seq ();
6045
6046 /* Try to perform a kind of shrink-wrapping, making sure the
6047 prologue/epilogue is emitted only around those parts of the
6048 function that require it. */
6049 try_shrink_wrapping (&entry_edge, prologue_seq);
6050
6051 /* If the target can handle splitting the prologue/epilogue into separate
6052 components, try to shrink-wrap these components separately. */
6053 try_shrink_wrapping_separate (entry_edge->dest);
6054
6055 /* If that did anything for any component we now need to generate the
6056 "main" prologue again. Because some targets require some of these
6057 to be called in a specific order (i386 requires the split prologue
6058 to be first, for example), we create all three sequences again here.
6059 If this does not work for some target, that target should not enable
6060 separate shrink-wrapping. */
6061 if (crtl->shrink_wrapped_separate)
6062 {
6063 split_prologue_seq = make_split_prologue_seq ();
6064 prologue_seq = make_prologue_seq ();
6065 epilogue_seq = make_epilogue_seq ();
6066 }
6067
6068 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6069
6070 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6071 this marker for the splits of EH_RETURN patterns, and nothing else
6072 uses the flag in the meantime. */
6073 epilogue_completed = 1;
6074
6075 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6076 some targets, these get split to a special version of the epilogue
6077 code. In order to be able to properly annotate these with unwind
6078 info, try to split them now. If we get a valid split, drop an
6079 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6080 edge e;
6081 edge_iterator ei;
6082 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6083 {
6084 rtx_insn *prev, *last, *trial;
6085
6086 if (e->flags & EDGE_FALLTHRU)
6087 continue;
6088 last = BB_END (e->src);
6089 if (!eh_returnjump_p (last))
6090 continue;
6091
6092 prev = PREV_INSN (last);
6093 trial = try_split (PATTERN (last), last, 1);
6094 if (trial == last)
6095 continue;
6096
6097 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6098 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6099 }
6100
6101 edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6102
6103 if (exit_fallthru_edge)
6104 {
6105 if (epilogue_seq)
6106 {
6107 insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
6108 commit_edge_insertions ();
6109
6110 /* The epilogue insns we inserted may cause the exit edge to no longer
6111 be fallthru. */
6112 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6113 {
6114 if (((e->flags & EDGE_FALLTHRU) != 0)
6115 && returnjump_p (BB_END (e->src)))
6116 e->flags &= ~EDGE_FALLTHRU;
6117 }
6118 }
6119 else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
6120 {
6121 /* We have a fall-through edge to the exit block, the source is not
6122 at the end of the function, and there will be an assembler epilogue
6123 at the end of the function.
6124 We can't use force_nonfallthru here, because that would try to
6125 use return. Inserting a jump 'by hand' is extremely messy, so
6126 we take advantage of cfg_layout_finalize using
6127 fixup_fallthru_exit_predecessor. */
6128 cfg_layout_initialize (0);
6129 basic_block cur_bb;
6130 FOR_EACH_BB_FN (cur_bb, cfun)
6131 if (cur_bb->index >= NUM_FIXED_BLOCKS
6132 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6133 cur_bb->aux = cur_bb->next_bb;
6134 cfg_layout_finalize ();
6135 }
6136 }
6137
6138 /* Insert the prologue. */
6139
6140 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6141
6142 if (split_prologue_seq || prologue_seq)
6143 {
6144 rtx_insn *split_prologue_insn = split_prologue_seq;
6145 if (split_prologue_seq)
6146 {
6147 while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6148 split_prologue_insn = NEXT_INSN (split_prologue_insn);
6149 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6150 }
6151
6152 rtx_insn *prologue_insn = prologue_seq;
6153 if (prologue_seq)
6154 {
6155 while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6156 prologue_insn = NEXT_INSN (prologue_insn);
6157 insert_insn_on_edge (prologue_seq, entry_edge);
6158 }
6159
6160 commit_edge_insertions ();
6161
6162 /* Look for basic blocks within the prologue insns. */
6163 if (split_prologue_insn
6164 && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
6165 split_prologue_insn = NULL;
6166 if (prologue_insn
6167 && BLOCK_FOR_INSN (prologue_insn) == NULL)
6168 prologue_insn = NULL;
6169 if (split_prologue_insn || prologue_insn)
6170 {
6171 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
6172 bitmap_clear (blocks);
6173 if (split_prologue_insn)
6174 bitmap_set_bit (blocks,
6175 BLOCK_FOR_INSN (split_prologue_insn)->index);
6176 if (prologue_insn)
6177 bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
6178 find_many_sub_basic_blocks (blocks);
6179 }
6180 }
6181
6182 default_rtl_profile ();
6183
6184 /* Emit sibling epilogues before any sibling call sites. */
6185 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6186 (e = ei_safe_edge (ei));
6187 ei_next (&ei))
6188 {
6189 /* Skip edges already handled, i.e. those that run without the prologue. */
6190 if (e->flags & EDGE_IGNORE)
6191 {
6192 e->flags &= ~EDGE_IGNORE;
6193 continue;
6194 }
6195
6196 rtx_insn *insn = BB_END (e->src);
6197
6198 if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6199 continue;
6200
6201 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6202 {
6203 start_sequence ();
6204 emit_note (NOTE_INSN_EPILOGUE_BEG);
6205 emit_insn (ep_seq);
6206 rtx_insn *seq = get_insns ();
6207 end_sequence ();
6208
6209 /* Retain a map of the epilogue insns. Used in life analysis to
6210 avoid getting rid of sibcall epilogue insns. Do this before we
6211 actually emit the sequence. */
6212 record_insns (seq, NULL, &epilogue_insn_hash);
6213 set_insn_locations (seq, epilogue_location);
6214
6215 emit_insn_before (seq, insn);
6216 }
6217 }
6218
6219 if (epilogue_seq)
6220 {
6221 rtx_insn *insn, *next;
6222
6223 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear within the
6224 epilogue to just before it, as those can be relevant for debug
6225 info generation. There is no need, however, to be strict about
6226 the existence of such a note. */
6228 for (insn = epilogue_seq; insn; insn = next)
6229 {
6230 next = NEXT_INSN (insn);
6231 if (NOTE_P (insn)
6232 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6233 reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6234 }
6235 }
6236
6237 /* Threading the prologue and epilogue changes the artificial refs
6238 in the entry and exit blocks. */
6239 epilogue_completed = 1;
6240 df_update_entry_exit_and_calls ();
6241 }
6242
6243 /* Reposition the prologue-end and epilogue-begin notes after
6244 instruction scheduling. */
6245
6246 void
6247 reposition_prologue_and_epilogue_notes (void)
6248 {
6249 if (!targetm.have_prologue ()
6250 && !targetm.have_epilogue ()
6251 && !targetm.have_sibcall_epilogue ())
6252 return;
6253
6254 /* Since the hash table is created on demand, the fact that it is
6255 non-null is a signal that it is non-empty. */
6256 if (prologue_insn_hash != NULL)
6257 {
6258 size_t len = prologue_insn_hash->elements ();
6259 rtx_insn *insn, *last = NULL, *note = NULL;
6260
6261 /* Scan from the beginning until we reach the last prologue insn. */
6262 /* ??? While we do have the CFG intact, there are two problems:
6263 (1) The prologue can contain loops (typically probing the stack),
6264 which means that the end of the prologue isn't in the first bb.
6265 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6266 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6267 {
6268 if (NOTE_P (insn))
6269 {
6270 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6271 note = insn;
6272 }
6273 else if (contains (insn, prologue_insn_hash))
6274 {
6275 last = insn;
6276 if (--len == 0)
6277 break;
6278 }
6279 }
6280
6281 if (last)
6282 {
6283 if (note == NULL)
6284 {
6285 /* Scan forward looking for the PROLOGUE_END note. It should
6286 be right at the beginning of the block, possibly with other
6287 insn notes that got moved there. */
6288 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6289 {
6290 if (NOTE_P (note)
6291 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6292 break;
6293 }
6294 }
6295
6296 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6297 if (LABEL_P (last))
6298 last = NEXT_INSN (last);
6299 reorder_insns (note, note, last);
6300 }
6301 }
6302
6303 if (epilogue_insn_hash != NULL)
6304 {
6305 edge_iterator ei;
6306 edge e;
6307
6308 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6309 {
6310 rtx_insn *insn, *first = NULL, *note = NULL;
6311 basic_block bb = e->src;
6312
6313 /* Scan from the beginning until we reach the first epilogue insn. */
6314 FOR_BB_INSNS (bb, insn)
6315 {
6316 if (NOTE_P (insn))
6317 {
6318 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6319 {
6320 note = insn;
6321 if (first != NULL)
6322 break;
6323 }
6324 }
6325 else if (first == NULL && contains (insn, epilogue_insn_hash))
6326 {
6327 first = insn;
6328 if (note != NULL)
6329 break;
6330 }
6331 }
6332
6333 if (note)
6334 {
6335 /* If the function has a single basic block, and no real
6336 epilogue insns (e.g. sibcall with no cleanup), the
6337 epilogue note can get scheduled before the prologue
6338 note. If we have frame related prologue insns, having
6339 them scanned during the epilogue will result in a crash.
6340 In this case re-order the epilogue note to just before
6341 the last insn in the block. */
6342 if (first == NULL)
6343 first = BB_END (bb);
6344
6345 if (PREV_INSN (first) != note)
6346 reorder_insns (note, note, PREV_INSN (first));
6347 }
6348 }
6349 }
6350 }
6351
6352 /* Returns the name of the function declared by FNDECL. */
6353 const char *
6354 fndecl_name (tree fndecl)
6355 {
6356 if (fndecl == NULL)
6357 return "(nofn)";
6358 return lang_hooks.decl_printable_name (fndecl, 1);
6359 }
6360
6361 /* Returns the name of function FN. */
6362 const char *
6363 function_name (struct function *fn)
6364 {
6365 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6366 return fndecl_name (fndecl);
6367 }
6368
6369 /* Returns the name of the current function. */
6370 const char *
6371 current_function_name (void)
6372 {
6373 return function_name (cfun);
6374 }
6375
6376
6377 static unsigned int
6378 rest_of_handle_check_leaf_regs (void)
6379 {
6380 #ifdef LEAF_REGISTERS
6381 crtl->uses_only_leaf_regs
6382 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6383 #endif
6384 return 0;
6385 }
6386
6387 /* Insert a TYPE into the used types hash table of CFUN. */
6388
6389 static void
6390 used_types_insert_helper (tree type, struct function *func)
6391 {
6392 if (type != NULL && func != NULL)
6393 {
6394 if (func->used_types_hash == NULL)
6395 func->used_types_hash = hash_set<tree>::create_ggc (37);
6396
6397 func->used_types_hash->add (type);
6398 }
6399 }
6400
6401 /* Given a type, insert it into the used-types hash table of cfun. */
6402 void
6403 used_types_insert (tree t)
6404 {
6405 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6406 if (TYPE_NAME (t))
6407 break;
6408 else
6409 t = TREE_TYPE (t);
6410 if (TREE_CODE (t) == ERROR_MARK)
6411 return;
6412 if (TYPE_NAME (t) == NULL_TREE
6413 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6414 t = TYPE_MAIN_VARIANT (t);
6415 if (debug_info_level > DINFO_LEVEL_NONE)
6416 {
6417 if (cfun)
6418 used_types_insert_helper (t, cfun);
6419 else
6420 {
6421 /* So this might be a type referenced by a global variable.
6422 Record that type so that we can later decide to emit its
6423 debug information. */
6424 vec_safe_push (types_used_by_cur_var_decl, t);
6425 }
6426 }
6427 }
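/* For illustration, given a use of "struct s **" the loop above peels
   the unnamed pointer layers down to "struct s", which has a name, and
   then typically records the main variant (e.g. with qualifiers such as
   "const" dropped). */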
6428
6429 /* Helper to hash a struct types_used_by_vars_entry. */
6430
6431 static hashval_t
6432 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6433 {
6434 gcc_assert (entry && entry->var_decl && entry->type);
6435
6436 return iterative_hash_object (entry->type,
6437 iterative_hash_object (entry->var_decl, 0));
6438 }
6439
6440 /* Hash function of the types_used_by_vars_entry hash table. */
6441
6442 hashval_t
6443 used_type_hasher::hash (types_used_by_vars_entry *entry)
6444 {
6445 return hash_types_used_by_vars_entry (entry);
6446 }
6447
6448 /* Equality function of the types_used_by_vars_entry hash table. */
6449
6450 bool
6451 used_type_hasher::equal (types_used_by_vars_entry *e1,
6452 types_used_by_vars_entry *e2)
6453 {
6454 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6455 }
6456
6457 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6458
6459 void
6460 types_used_by_var_decl_insert (tree type, tree var_decl)
6461 {
6462 if (type != NULL && var_decl != NULL)
6463 {
6464 types_used_by_vars_entry **slot;
6465 struct types_used_by_vars_entry e;
6466 e.var_decl = var_decl;
6467 e.type = type;
6468 if (types_used_by_vars_hash == NULL)
6469 types_used_by_vars_hash
6470 = hash_table<used_type_hasher>::create_ggc (37);
6471
6472 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6473 if (*slot == NULL)
6474 {
6475 struct types_used_by_vars_entry *entry;
6476 entry = ggc_alloc<types_used_by_vars_entry> ();
6477 entry->type = type;
6478 entry->var_decl = var_decl;
6479 *slot = entry;
6480 }
6481 }
6482 }
6483
6484 namespace {
6485
6486 const pass_data pass_data_leaf_regs =
6487 {
6488 RTL_PASS, /* type */
6489 "*leaf_regs", /* name */
6490 OPTGROUP_NONE, /* optinfo_flags */
6491 TV_NONE, /* tv_id */
6492 0, /* properties_required */
6493 0, /* properties_provided */
6494 0, /* properties_destroyed */
6495 0, /* todo_flags_start */
6496 0, /* todo_flags_finish */
6497 };
6498
6499 class pass_leaf_regs : public rtl_opt_pass
6500 {
6501 public:
6502 pass_leaf_regs (gcc::context *ctxt)
6503 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6504 {}
6505
6506 /* opt_pass methods: */
6507 virtual unsigned int execute (function *)
6508 {
6509 return rest_of_handle_check_leaf_regs ();
6510 }
6511
6512 }; // class pass_leaf_regs
6513
6514 } // anon namespace
6515
6516 rtl_opt_pass *
6517 make_pass_leaf_regs (gcc::context *ctxt)
6518 {
6519 return new pass_leaf_regs (ctxt);
6520 }
6521
6522 static unsigned int
6523 rest_of_handle_thread_prologue_and_epilogue (void)
6524 {
6525 /* prepare_shrink_wrap is sensitive to the block structure of the control
6526 flow graph, so clean it up first. */
6527 if (optimize)
6528 cleanup_cfg (0);
6529
6530 /* On some machines, the prologue and epilogue code, or parts thereof,
6531 can be represented as RTL. Doing so lets us schedule insns between
6532 it and the rest of the code and also allows delayed branch
6533 scheduling to operate in the epilogue. */
6534 thread_prologue_and_epilogue_insns ();
6535
6536 /* Some non-cold blocks may now be only reachable from cold blocks.
6537 Fix that up. */
6538 fixup_partitions ();
6539
6540 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6541 see PR57320. */
6542 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6543
6544 /* The stack usage info is finalized during prologue expansion. */
6545 if (flag_stack_usage_info)
6546 output_stack_usage ();
6547
6548 return 0;
6549 }
6550
6551 namespace {
6552
6553 const pass_data pass_data_thread_prologue_and_epilogue =
6554 {
6555 RTL_PASS, /* type */
6556 "pro_and_epilogue", /* name */
6557 OPTGROUP_NONE, /* optinfo_flags */
6558 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6559 0, /* properties_required */
6560 0, /* properties_provided */
6561 0, /* properties_destroyed */
6562 0, /* todo_flags_start */
6563 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6564 };
6565
6566 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6567 {
6568 public:
6569 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6570 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6571 {}
6572
6573 /* opt_pass methods: */
6574 virtual unsigned int execute (function *)
6575 {
6576 return rest_of_handle_thread_prologue_and_epilogue ();
6577 }
6578
6579 }; // class pass_thread_prologue_and_epilogue
6580
6581 } // anon namespace
6582
6583 rtl_opt_pass *
6584 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6585 {
6586 return new pass_thread_prologue_and_epilogue (ctxt);
6587 }
6588
6589
6590 /* This mini-pass fixes fall-out from SSA in asm statements that have
6591 in-out constraints. Say you start with
6592
6593 orig = inout;
6594 asm ("": "+mr" (inout));
6595 use (orig);
6596
6597 which is transformed very early to use explicit output and match operands:
6598
6599 orig = inout;
6600 asm ("": "=mr" (inout) : "0" (inout));
6601 use (orig);
6602
6603 Or, after SSA and copyprop,
6604
6605 asm ("": "=mr" (inout_2) : "0" (inout_1));
6606 use (inout_1);
6607
6608 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6609 they represent two separate values, so they will get different pseudo
6610 registers during expansion. Then, since the two operands need to match
6611 per the constraints, but use different pseudo registers, reload can
6612 only register a reload for these operands. But reloads can only be
6613 satisfied by hardregs, not by memory, so we need a register for this
6614 reload, just because we are presented with non-matching operands.
6615 So, even though we allow memory for this operand, no memory can be
6616 used for it, just because the two operands don't match. This can
6617 cause reload failures on register-starved targets.
6618
6619 So it's a symptom of reload not being able to use memory for reloads
6620 or, alternatively, it's also a symptom of both operands not coming into
6621 reload as matching (in which case the pseudo could go to memory just
6622 fine, as the alternative allows it, and no reload would be necessary).
6623 We fix the latter problem here, by transforming
6624
6625 asm ("": "=mr" (inout_2) : "0" (inout_1));
6626
6627 back to
6628
6629 inout_2 = inout_1;
6630 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6631
6632 static void
6633 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6634 {
6635 int i;
6636 bool changed = false;
6637 rtx op = SET_SRC (p_sets[0]);
6638 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6639 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6640 bool *output_matched = XALLOCAVEC (bool, noutputs);
6641
6642 memset (output_matched, 0, noutputs * sizeof (bool));
6643 for (i = 0; i < ninputs; i++)
6644 {
6645 rtx input, output;
6646 rtx_insn *insns;
6647 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6648 char *end;
6649 int match, j;
6650
6651 if (*constraint == '%')
6652 constraint++;
6653
6654 match = strtoul (constraint, &end, 10);
6655 if (end == constraint)
6656 continue;
6657
6658 gcc_assert (match < noutputs);
6659 output = SET_DEST (p_sets[match]);
6660 input = RTVEC_ELT (inputs, i);
6661 /* Only do the transformation for pseudos. */
6662 if (! REG_P (output)
6663 || rtx_equal_p (output, input)
6664 || !(REG_P (input) || SUBREG_P (input)
6665 || MEM_P (input) || CONSTANT_P (input))
6666 || !general_operand (input, GET_MODE (output)))
6667 continue;
6668
6669 /* We can't do anything if the output is also used as input,
6670 as we're going to overwrite it. */
6671 for (j = 0; j < ninputs; j++)
6672 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6673 break;
6674 if (j != ninputs)
6675 continue;
6676
6677 /* Avoid changing the same input several times. For
6678 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6679 only change in once (to out1), rather than changing it
6680 first to out1 and afterwards to out2. */
6681 if (i > 0)
6682 {
6683 for (j = 0; j < noutputs; j++)
6684 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6685 break;
6686 if (j != noutputs)
6687 continue;
6688 }
6689 output_matched[match] = true;
6690
6691 start_sequence ();
6692 emit_move_insn (output, copy_rtx (input));
6693 insns = get_insns ();
6694 end_sequence ();
6695 emit_insn_before (insns, insn);
6696
6697 /* Now replace all mentions of the input with output. We can't
6698 just replace the occurrence in inputs[i], as the register might
6699 also be used in some other input (or even in an address of an
6700 output), which would mean possibly increasing the number of
6701 inputs by one (namely 'output' in addition), which might pose
6702 a too complicated problem for reload to solve. E.g. this situation:
6703
6704 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6705
6706 Here 'input' is used in two occurrences as input (once for the
6707 input operand, once for the address in the second output operand).
6708 If we would replace only the occurrence of the input operand (to
6709 make the matching) we would be left with this:
6710
6711 output = input
6712 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6713
6714 Now we suddenly have two different input values (containing the same
6715 value, but different pseudos) where we formerly had only one.
6716 With more complicated asms this might lead to reload failures
6717 which wouldn't have happened without this pass. So, iterate over
6718 all operands and replace all occurrences of the register used. */
6719 for (j = 0; j < noutputs; j++)
6720 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6721 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6722 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6723 input, output);
6724 for (j = 0; j < ninputs; j++)
6725 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6726 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6727 input, output);
6728
6729 changed = true;
6730 }
6731
6732 if (changed)
6733 df_insn_rescan (insn);
6734 }
6735
6736 /* Add the decl D to the local_decls list of FUN. */
6737
6738 void
6739 add_local_decl (struct function *fun, tree d)
6740 {
6741 gcc_assert (VAR_P (d));
6742 vec_safe_push (fun->local_decls, d);
6743 }
6744
6745 namespace {
6746
6747 const pass_data pass_data_match_asm_constraints =
6748 {
6749 RTL_PASS, /* type */
6750 "asmcons", /* name */
6751 OPTGROUP_NONE, /* optinfo_flags */
6752 TV_NONE, /* tv_id */
6753 0, /* properties_required */
6754 0, /* properties_provided */
6755 0, /* properties_destroyed */
6756 0, /* todo_flags_start */
6757 0, /* todo_flags_finish */
6758 };
6759
6760 class pass_match_asm_constraints : public rtl_opt_pass
6761 {
6762 public:
6763 pass_match_asm_constraints (gcc::context *ctxt)
6764 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6765 {}
6766
6767 /* opt_pass methods: */
6768 virtual unsigned int execute (function *);
6769
6770 }; // class pass_match_asm_constraints
6771
6772 unsigned
6773 pass_match_asm_constraints::execute (function *fun)
6774 {
6775 basic_block bb;
6776 rtx_insn *insn;
6777 rtx pat, *p_sets;
6778 int noutputs;
6779
6780 if (!crtl->has_asm_statement)
6781 return 0;
6782
6783 df_set_flags (DF_DEFER_INSN_RESCAN);
6784 FOR_EACH_BB_FN (bb, fun)
6785 {
6786 FOR_BB_INSNS (bb, insn)
6787 {
6788 if (!INSN_P (insn))
6789 continue;
6790
6791 pat = PATTERN (insn);
6792 if (GET_CODE (pat) == PARALLEL)
6793 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6794 else if (GET_CODE (pat) == SET)
6795 p_sets = &PATTERN (insn), noutputs = 1;
6796 else
6797 continue;
6798
6799 if (GET_CODE (*p_sets) == SET
6800 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6801 match_asm_constraints_1 (insn, p_sets, noutputs);
6802 }
6803 }
6804
6805 return TODO_df_finish;
6806 }
6807
6808 } // anon namespace
6809
6810 rtl_opt_pass *
6811 make_pass_match_asm_constraints (gcc::context *ctxt)
6812 {
6813 return new pass_match_asm_constraints (ctxt);
6814 }
6815
6816
6817 #include "gt-function.h"
6818