/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "cfglayout.h"
#include "gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "timevar.h"
#include "vecprim.h"
#include "params.h"
#include "bb-reorder.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value down to the largest integer that is a multiple of the
   required alignment and not greater than the value.  Avoid using
   division in case the value is negative.  Assume the alignment is a
   power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
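/* For instance, with an alignment of 8 (a power of two, as assumed),
   FLOOR_ROUND (13, 8) is 8 and CEIL_ROUND (13, 8) is 16, and a negative
   value rounds away from zero: FLOOR_ROUND (-13, 8) is -16, which is
   what the masking gives on two's-complement values, where division
   would instead have rounded toward zero.  */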
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;


htab_t types_used_by_vars_hash = NULL;
VEC(tree,gc) *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;

/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
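/* Language front ends typically bracket the compilation of a nested
   function with the pair above, along the lines of:

       push_function_context ();
       ... expand the nested function to RTL ...
       pop_function_context ();

   so that cfun and current_function_decl refer to the outer function
   again afterwards.  */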
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
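/* Concretely, on a target whose Pmode is 32 bits wide, any frame larger
   than 2**31 - 64 * UNITS_PER_WORD bytes is rejected, the subtraction
   leaving headroom for the fixed part of the frame.  */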
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
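/* A worked example of the rounding above: with FRAME_GROWS_DOWNWARD,
   frame_phase 0, and a request of SIZE 8 with ALIGNMENT 8 against a
   hole with START -20 and LENGTH 20, we get
   FLOOR_ROUND (-20 + 20 - 8, 8) = -8, which is >= START, so *POFFSET
   becomes -8 and the function returns true.  */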
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment,
			   &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with KIND as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
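/* For instance, a caller needing a word-sized scratch slot in the frame
   might write, as a rough sketch:

       rtx slot = assign_stack_local (word_mode,
				      GET_MODE_SIZE (word_mode), 0);

   where ALIGN of 0 requests the natural alignment of word_mode, per the
   description above.  */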
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    *slot = NULL;
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
		 remove_unused_temp_slot_addresses_1,
		 NULL);
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  {
	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	      return p;
	  }
    }

  return NULL;
}
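/* For example, an address such as
   (plus (reg virtual-stack-vars) (const_int -16)) that is not in the
   hash table is matched by the last-resort loop above against each
   slot's [base_offset, base_offset + full_size) interval.  */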
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc_temp_slot ();
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are the same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }
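  /* As a rough usage sketch, code expanding a call whose value comes
     back in memory could obtain a slot with something like

	 rtx mem = assign_temp (TREE_TYPE (exp), 0, 1, 0);

     passing MEMORY_REQUIRED as 1 to force an addressable stack slot;
     EXP here stands for whatever expression is being expanded.  */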
#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     optimizing at high levels.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
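/* For example, two freed BLKmode slots covering [0, 16) and [16, 32)
   relative to the frame pointer satisfy
   p->base_offset + p->full_size == q->base_offset, so they are merged
   into one 32-byte slot that later requests can reuse.  */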
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
	    {
	      next = q->next;

	      if (p != q && q->addr_taken)
		move_slot_to_level (q, temp_slot_level - 1);
	    }

	  move_slot_to_level (p, temp_slot_level - 1);
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	{
	  make_slot_available (p);
	  some_available = true;
	}
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
					       temp_slot_address_hash,
					       temp_slot_address_eq,
					       NULL);
  else
    htab_empty (temp_slot_address_table);
}
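/* Callers use the level machinery above in matched pairs, roughly:

       push_temp_slots ();
       ... expand a statement, allocating temporaries with
	   assign_stack_temp or assign_temp ...
       free_temp_slots ();
       pop_temp_slots ();

   so that a statement's temporaries become reusable once it has been
   expanded, unless preserved by preserve_temp_slots or marked "kept".  */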
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif


/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
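/* So, once instantiate_virtual_regs has computed the offsets, a use
   such as (plus (reg virtual-stack-vars) (const_int 8)) rewrites to
   (plus (reg frame-pointer) (const_int var_offset + 8)), with the two
   constants folded together by the callers that follow.  */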
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
	{
	  *loc = plus_constant (new_rtx, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
	{
	  new_rtx = plus_constant (new_rtx, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new_rtx, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offset by a constant, but the base reg isn't
	       virtual-stack-reg anymore.  Below we would do a force_reg on
	       the whole operand, but this insn might actually only accept
	       memory.  Hence, before doing that last resort, try to reload
	       the address into a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
					     GEN_INT (offset), NULL_RTX,
					     1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  delete_insn_and_edges (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}

/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
	{
	  if (DECL_RTL_SET_P (t))
	    instantiate_decl_rtl (DECL_RTL (t));
	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
	      && DECL_INCOMING_RTL (t))
	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
	  if ((TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == RESULT_DECL)
	      && DECL_HAS_VALUE_EXPR_P (t))
	    {
	      tree v = DECL_VALUE_EXPR (t);
	      walk_tree (&v, instantiate_expr, NULL, NULL);
	    }
	}
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  VEC_free (tree, gc, cfun->local_decls);
}

/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
*/ 1881 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl); 1882 var_offset = STARTING_FRAME_OFFSET; 1883 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl); 1884 out_arg_offset = STACK_POINTER_OFFSET; 1885 #ifdef FRAME_POINTER_CFA_OFFSET 1886 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl); 1887 #else 1888 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl); 1889 #endif 1890 1891 /* Initialize recognition, indicating that volatile is OK. */ 1892 init_recog (); 1893 1894 /* Scan through all the insns, instantiating every virtual register still 1895 present. */ 1896 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) 1897 if (INSN_P (insn)) 1898 { 1899 /* These patterns in the instruction stream can never be recognized. 1900 Fortunately, they shouldn't contain virtual registers either. */ 1901 if (GET_CODE (PATTERN (insn)) == USE 1902 || GET_CODE (PATTERN (insn)) == CLOBBER 1903 || GET_CODE (PATTERN (insn)) == ADDR_VEC 1904 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC 1905 || GET_CODE (PATTERN (insn)) == ASM_INPUT) 1906 continue; 1907 else if (DEBUG_INSN_P (insn)) 1908 for_each_rtx (&INSN_VAR_LOCATION (insn), 1909 instantiate_virtual_regs_in_rtx, NULL); 1910 else 1911 instantiate_virtual_regs_in_insn (insn); 1912 1913 if (INSN_DELETED_P (insn)) 1914 continue; 1915 1916 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL); 1917 1918 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */ 1919 if (CALL_P (insn)) 1920 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn), 1921 instantiate_virtual_regs_in_rtx, NULL); 1922 } 1923 1924 /* Instantiate the virtual registers in the DECLs for debugging purposes. */ 1925 instantiate_decls (current_function_decl); 1926 1927 targetm.instantiate_decls (); 1928 1929 /* Indicate that, from now on, assign_stack_local should use 1930 frame_pointer_rtx. */ 1931 virtuals_instantiated = 1; 1932 1933 return 0; 1934 } 1935 1936 struct rtl_opt_pass pass_instantiate_virtual_regs = 1937 { 1938 { 1939 RTL_PASS, 1940 "vregs", /* name */ 1941 NULL, /* gate */ 1942 instantiate_virtual_regs, /* execute */ 1943 NULL, /* sub */ 1944 NULL, /* next */ 1945 0, /* static_pass_number */ 1946 TV_NONE, /* tv_id */ 1947 0, /* properties_required */ 1948 0, /* properties_provided */ 1949 0, /* properties_destroyed */ 1950 0, /* todo_flags_start */ 1951 0 /* todo_flags_finish */ 1952 } 1953 }; 1954 1955 1956 /* Return 1 if EXP is an aggregate type (or a value with aggregate type). 1957 This means a type for which function calls must pass an address to the 1958 function or get an address back from the function. 1959 EXP may be a type node or an expression (whose type is tested). */ 1960 1961 int 1962 aggregate_value_p (const_tree exp, const_tree fntype) 1963 { 1964 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp); 1965 int i, regno, nregs; 1966 rtx reg; 1967 1968 if (fntype) 1969 switch (TREE_CODE (fntype)) 1970 { 1971 case CALL_EXPR: 1972 { 1973 tree fndecl = get_callee_fndecl (fntype); 1974 fntype = (fndecl 1975 ? TREE_TYPE (fndecl) 1976 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)))); 1977 } 1978 break; 1979 case FUNCTION_DECL: 1980 fntype = TREE_TYPE (fntype); 1981 break; 1982 case FUNCTION_TYPE: 1983 case METHOD_TYPE: 1984 break; 1985 case IDENTIFIER_NODE: 1986 fntype = NULL_TREE; 1987 break; 1988 default: 1989 /* We don't expect other tree types here.
*/ 1990 gcc_unreachable (); 1991 } 1992 1993 if (VOID_TYPE_P (type)) 1994 return 0; 1995 1996 /* If a record should be passed the same as its first (and only) member 1997 don't pass it as an aggregate. */ 1998 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type)) 1999 return aggregate_value_p (first_field (type), fntype); 2000 2001 /* If the front end has decided that this needs to be passed by 2002 reference, do so. */ 2003 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL) 2004 && DECL_BY_REFERENCE (exp)) 2005 return 1; 2006 2007 /* Function types that are TREE_ADDRESSABLE force return in memory. */ 2008 if (fntype && TREE_ADDRESSABLE (fntype)) 2009 return 1; 2010 2011 /* Types that are TREE_ADDRESSABLE must be constructed in memory, 2012 and thus can't be returned in registers. */ 2013 if (TREE_ADDRESSABLE (type)) 2014 return 1; 2015 2016 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type)) 2017 return 1; 2018 2019 if (targetm.calls.return_in_memory (type, fntype)) 2020 return 1; 2021 2022 /* Make sure we have suitable call-clobbered regs to return 2023 the value in; if not, we must return it in memory. */ 2024 reg = hard_function_value (type, 0, fntype, 0); 2025 2026 /* If we have something other than a REG (e.g. a PARALLEL), then assume 2027 it is OK. */ 2028 if (!REG_P (reg)) 2029 return 0; 2030 2031 regno = REGNO (reg); 2032 nregs = hard_regno_nregs[regno][TYPE_MODE (type)]; 2033 for (i = 0; i < nregs; i++) 2034 if (! call_used_regs[regno + i]) 2035 return 1; 2036 2037 return 0; 2038 } 2039 2040 /* Return true if we should assign DECL a pseudo register; false if it 2041 should live on the local stack. */ 2042 2043 bool 2044 use_register_for_decl (const_tree decl) 2045 { 2046 if (!targetm.calls.allocate_stack_slots_for_args()) 2047 return true; 2048 2049 /* Honor volatile. */ 2050 if (TREE_SIDE_EFFECTS (decl)) 2051 return false; 2052 2053 /* Honor addressability. */ 2054 if (TREE_ADDRESSABLE (decl)) 2055 return false; 2056 2057 /* Only register-like things go in registers. */ 2058 if (DECL_MODE (decl) == BLKmode) 2059 return false; 2060 2061 /* If -ffloat-store specified, don't put explicit float variables 2062 into registers. */ 2063 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa 2064 propagates values across these stores, and it probably shouldn't. */ 2065 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl))) 2066 return false; 2067 2068 /* If we're not interested in tracking debugging information for 2069 this decl, then we can certainly put it in a register. */ 2070 if (DECL_IGNORED_P (decl)) 2071 return true; 2072 2073 if (optimize) 2074 return true; 2075 2076 if (!DECL_REGISTER (decl)) 2077 return false; 2078 2079 switch (TREE_CODE (TREE_TYPE (decl))) 2080 { 2081 case RECORD_TYPE: 2082 case UNION_TYPE: 2083 case QUAL_UNION_TYPE: 2084 /* When not optimizing, disregard register keyword for variables with 2085 types containing methods, otherwise the methods won't be callable 2086 from the debugger. */ 2087 if (TYPE_METHODS (TREE_TYPE (decl))) 2088 return false; 2089 break; 2090 default: 2091 break; 2092 } 2093 2094 return true; 2095 } 2096 2097 /* Return true if TYPE should be passed by invisible reference. */ 2098 2099 bool 2100 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode, 2101 tree type, bool named_arg) 2102 { 2103 if (type) 2104 { 2105 /* If this type contains non-trivial constructors, then it is 2106 forbidden for the middle-end to create any new copies. 
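For example, a C++ class with a user-defined copy constructor is marked TREE_ADDRESSABLE by the front end, so an argument of that type must be passed by reference rather than copied into the argument area.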
*/ 2107 if (TREE_ADDRESSABLE (type)) 2108 return true; 2109 2110 /* GCC post 3.4 passes *all* variable sized types by reference. */ 2111 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) 2112 return true; 2113 2114 /* If a record type should be passed the same as its first (and only) 2115 member, use the type and mode of that member. */ 2116 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type)) 2117 { 2118 type = TREE_TYPE (first_field (type)); 2119 mode = TYPE_MODE (type); 2120 } 2121 } 2122 2123 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode, 2124 type, named_arg); 2125 } 2126 2127 /* Return true if TYPE, which is passed by reference, should be callee 2128 copied instead of caller copied. */ 2129 2130 bool 2131 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode, 2132 tree type, bool named_arg) 2133 { 2134 if (type && TREE_ADDRESSABLE (type)) 2135 return false; 2136 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type, 2137 named_arg); 2138 } 2139 2140 /* Structures to communicate between the subroutines of assign_parms. 2141 The first holds data persistent across all parameters, the second 2142 is cleared out for each parameter. */ 2143 2144 struct assign_parm_data_all 2145 { 2146 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS 2147 should become a job of the target or otherwise encapsulated. */ 2148 CUMULATIVE_ARGS args_so_far_v; 2149 cumulative_args_t args_so_far; 2150 struct args_size stack_args_size; 2151 tree function_result_decl; 2152 tree orig_fnargs; 2153 rtx first_conversion_insn; 2154 rtx last_conversion_insn; 2155 HOST_WIDE_INT pretend_args_size; 2156 HOST_WIDE_INT extra_pretend_bytes; 2157 int reg_parm_stack_space; 2158 }; 2159 2160 struct assign_parm_data_one 2161 { 2162 tree nominal_type; 2163 tree passed_type; 2164 rtx entry_parm; 2165 rtx stack_parm; 2166 enum machine_mode nominal_mode; 2167 enum machine_mode passed_mode; 2168 enum machine_mode promoted_mode; 2169 struct locate_and_pad_arg_data locate; 2170 int partial; 2171 BOOL_BITFIELD named_arg : 1; 2172 BOOL_BITFIELD passed_pointer : 1; 2173 BOOL_BITFIELD on_stack : 1; 2174 BOOL_BITFIELD loaded_in_reg : 1; 2175 }; 2176 2177 /* A subroutine of assign_parms. Initialize ALL. */ 2178 2179 static void 2180 assign_parms_initialize_all (struct assign_parm_data_all *all) 2181 { 2182 tree fntype ATTRIBUTE_UNUSED; 2183 2184 memset (all, 0, sizeof (*all)); 2185 2186 fntype = TREE_TYPE (current_function_decl); 2187 2188 #ifdef INIT_CUMULATIVE_INCOMING_ARGS 2189 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX); 2190 #else 2191 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX, 2192 current_function_decl, -1); 2193 #endif 2194 all->args_so_far = pack_cumulative_args (&all->args_so_far_v); 2195 2196 #ifdef REG_PARM_STACK_SPACE 2197 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl); 2198 #endif 2199 } 2200 2201 /* If ARGS contains entries with complex types, split each entry into two 2202 entries of the component type. The ARGS vector is updated in 2203 place.
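For example, a `__complex__ double' parameter, on a target whose split_complex_arg hook returns true for it, is rewritten into two `double' PARM_DECLs holding the real and imaginary parts.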
*/ 2204 2205 static void 2206 split_complex_args (VEC(tree, heap) **args) 2207 { 2208 unsigned i; 2209 tree p; 2210 2211 FOR_EACH_VEC_ELT (tree, *args, i, p) 2212 { 2213 tree type = TREE_TYPE (p); 2214 if (TREE_CODE (type) == COMPLEX_TYPE 2215 && targetm.calls.split_complex_arg (type)) 2216 { 2217 tree decl; 2218 tree subtype = TREE_TYPE (type); 2219 bool addressable = TREE_ADDRESSABLE (p); 2220 2221 /* Rewrite the PARM_DECL's type with its component. */ 2222 p = copy_node (p); 2223 TREE_TYPE (p) = subtype; 2224 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p)); 2225 DECL_MODE (p) = VOIDmode; 2226 DECL_SIZE (p) = NULL; 2227 DECL_SIZE_UNIT (p) = NULL; 2228 /* If this arg must go in memory, put it in a pseudo here. 2229 We can't allow it to go in memory as per normal parms, 2230 because the usual place might not have the imag part 2231 adjacent to the real part. */ 2232 DECL_ARTIFICIAL (p) = addressable; 2233 DECL_IGNORED_P (p) = addressable; 2234 TREE_ADDRESSABLE (p) = 0; 2235 layout_decl (p, 0); 2236 VEC_replace (tree, *args, i, p); 2237 2238 /* Build a second synthetic decl. */ 2239 decl = build_decl (EXPR_LOCATION (p), 2240 PARM_DECL, NULL_TREE, subtype); 2241 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p); 2242 DECL_ARTIFICIAL (decl) = addressable; 2243 DECL_IGNORED_P (decl) = addressable; 2244 layout_decl (decl, 0); 2245 VEC_safe_insert (tree, heap, *args, ++i, decl); 2246 } 2247 } 2248 } 2249 2250 /* A subroutine of assign_parms. Adjust the parameter list to incorporate 2251 the hidden struct return argument, and (abi willing) complex args. 2252 Return the new parameter list. */ 2253 2254 static VEC(tree, heap) * 2255 assign_parms_augmented_arg_list (struct assign_parm_data_all *all) 2256 { 2257 tree fndecl = current_function_decl; 2258 tree fntype = TREE_TYPE (fndecl); 2259 VEC(tree, heap) *fnargs = NULL; 2260 tree arg; 2261 2262 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg)) 2263 VEC_safe_push (tree, heap, fnargs, arg); 2264 2265 all->orig_fnargs = DECL_ARGUMENTS (fndecl); 2266 2267 /* If struct value address is treated as the first argument, make it so. */ 2268 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl) 2269 && ! cfun->returns_pcc_struct 2270 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0) 2271 { 2272 tree type = build_pointer_type (TREE_TYPE (fntype)); 2273 tree decl; 2274 2275 decl = build_decl (DECL_SOURCE_LOCATION (fndecl), 2276 PARM_DECL, get_identifier (".result_ptr"), type); 2277 DECL_ARG_TYPE (decl) = type; 2278 DECL_ARTIFICIAL (decl) = 1; 2279 DECL_NAMELESS (decl) = 1; 2280 TREE_CONSTANT (decl) = 1; 2281 2282 DECL_CHAIN (decl) = all->orig_fnargs; 2283 all->orig_fnargs = decl; 2284 VEC_safe_insert (tree, heap, fnargs, 0, decl); 2285 2286 all->function_result_decl = decl; 2287 } 2288 2289 /* If the target wants to split complex arguments into scalars, do so. */ 2290 if (targetm.calls.split_complex_arg) 2291 split_complex_args (&fnargs); 2292 2293 return fnargs; 2294 } 2295 2296 /* A subroutine of assign_parms. Examine PARM and pull out type and mode 2297 data for the parameter. Incorporate ABI specifics such as pass-by- 2298 reference and type promotion. */ 2299 2300 static void 2301 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, 2302 struct assign_parm_data_one *data) 2303 { 2304 tree nominal_type, passed_type; 2305 enum machine_mode nominal_mode, passed_mode, promoted_mode; 2306 int unsignedp; 2307 2308 memset (data, 0, sizeof (*data)); 2309 2310 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. 
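E.g. given `int f (int a, ...)', the anonymous arguments fetched with va_arg are the variadic ones; whether A itself counts as named at this point depends on the target's strict_argument_naming hook, as coded below.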
*/ 2311 if (!cfun->stdarg) 2312 data->named_arg = 1; /* No variadic parms. */ 2313 else if (DECL_CHAIN (parm)) 2314 data->named_arg = 1; /* Not the last non-variadic parm. */ 2315 else if (targetm.calls.strict_argument_naming (all->args_so_far)) 2316 data->named_arg = 1; /* Only variadic ones are unnamed. */ 2317 else 2318 data->named_arg = 0; /* Treat as variadic. */ 2319 2320 nominal_type = TREE_TYPE (parm); 2321 passed_type = DECL_ARG_TYPE (parm); 2322 2323 /* Look out for errors propagating this far. Also, if the parameter's 2324 type is void then its value doesn't matter. */ 2325 if (TREE_TYPE (parm) == error_mark_node 2326 /* This can happen after weird syntax errors 2327 or if an enum type is defined among the parms. */ 2328 || TREE_CODE (parm) != PARM_DECL 2329 || passed_type == NULL 2330 || VOID_TYPE_P (nominal_type)) 2331 { 2332 nominal_type = passed_type = void_type_node; 2333 nominal_mode = passed_mode = promoted_mode = VOIDmode; 2334 goto egress; 2335 } 2336 2337 /* Find mode of arg as it is passed, and mode of arg as it should be 2338 during execution of this function. */ 2339 passed_mode = TYPE_MODE (passed_type); 2340 nominal_mode = TYPE_MODE (nominal_type); 2341 2342 /* If the parm is to be passed as a transparent union or record, use the 2343 type of the first field for the tests below. We have already verified 2344 that the modes are the same. */ 2345 if ((TREE_CODE (passed_type) == UNION_TYPE 2346 || TREE_CODE (passed_type) == RECORD_TYPE) 2347 && TYPE_TRANSPARENT_AGGR (passed_type)) 2348 passed_type = TREE_TYPE (first_field (passed_type)); 2349 2350 /* See if this arg was passed by invisible reference. */ 2351 if (pass_by_reference (&all->args_so_far_v, passed_mode, 2352 passed_type, data->named_arg)) 2353 { 2354 passed_type = nominal_type = build_pointer_type (passed_type); 2355 data->passed_pointer = true; 2356 passed_mode = nominal_mode = Pmode; 2357 } 2358 2359 /* Find mode as it is passed by the ABI. */ 2360 unsignedp = TYPE_UNSIGNED (passed_type); 2361 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp, 2362 TREE_TYPE (current_function_decl), 0); 2363 2364 egress: 2365 data->nominal_type = nominal_type; 2366 data->passed_type = passed_type; 2367 data->nominal_mode = nominal_mode; 2368 data->passed_mode = passed_mode; 2369 data->promoted_mode = promoted_mode; 2370 } 2371 2372 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */ 2373 2374 static void 2375 assign_parms_setup_varargs (struct assign_parm_data_all *all, 2376 struct assign_parm_data_one *data, bool no_rtl) 2377 { 2378 int varargs_pretend_bytes = 0; 2379 2380 targetm.calls.setup_incoming_varargs (all->args_so_far, 2381 data->promoted_mode, 2382 data->passed_type, 2383 &varargs_pretend_bytes, no_rtl); 2384 2385 /* If the back-end has requested extra stack space, record how much is 2386 needed. Do not change pretend_args_size otherwise since it may be 2387 nonzero from an earlier partial argument. */ 2388 if (varargs_pretend_bytes > 0) 2389 all->pretend_args_size = varargs_pretend_bytes; 2390 } 2391 2392 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to 2393 the incoming location of the current parameter. 
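The result is a hard register, a PARALLEL when the argument arrives in several pieces, or 0 when the argument arrives purely on the stack.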
*/ 2394 2395 static void 2396 assign_parm_find_entry_rtl (struct assign_parm_data_all *all, 2397 struct assign_parm_data_one *data) 2398 { 2399 HOST_WIDE_INT pretend_bytes = 0; 2400 rtx entry_parm; 2401 bool in_regs; 2402 2403 if (data->promoted_mode == VOIDmode) 2404 { 2405 data->entry_parm = data->stack_parm = const0_rtx; 2406 return; 2407 } 2408 2409 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far, 2410 data->promoted_mode, 2411 data->passed_type, 2412 data->named_arg); 2413 2414 if (entry_parm == 0) 2415 data->promoted_mode = data->passed_mode; 2416 2417 /* Determine parm's home in the stack, in case it arrives in the stack 2418 or we should pretend it did. Compute the stack position and rtx where 2419 the argument arrives and its size. 2420 2421 There is one complexity here: If this was a parameter that would 2422 have been passed in registers, but wasn't only because it is 2423 __builtin_va_alist, we want locate_and_pad_parm to treat it as if 2424 it came in a register so that REG_PARM_STACK_SPACE isn't skipped. 2425 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0 2426 as it was the previous time. */ 2427 in_regs = entry_parm != 0; 2428 #ifdef STACK_PARMS_IN_REG_PARM_AREA 2429 in_regs = true; 2430 #endif 2431 if (!in_regs && !data->named_arg) 2432 { 2433 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far)) 2434 { 2435 rtx tem; 2436 tem = targetm.calls.function_incoming_arg (all->args_so_far, 2437 data->promoted_mode, 2438 data->passed_type, true); 2439 in_regs = tem != NULL; 2440 } 2441 } 2442 2443 /* If this parameter was passed both in registers and in the stack, use 2444 the copy on the stack. */ 2445 if (targetm.calls.must_pass_in_stack (data->promoted_mode, 2446 data->passed_type)) 2447 entry_parm = 0; 2448 2449 if (entry_parm) 2450 { 2451 int partial; 2452 2453 partial = targetm.calls.arg_partial_bytes (all->args_so_far, 2454 data->promoted_mode, 2455 data->passed_type, 2456 data->named_arg); 2457 data->partial = partial; 2458 2459 /* The caller might already have allocated stack space for the 2460 register parameters. */ 2461 if (partial != 0 && all->reg_parm_stack_space == 0) 2462 { 2463 /* Part of this argument is passed in registers and part 2464 is passed on the stack. Ask the prologue code to extend 2465 the stack part so that we can recreate the full value. 2466 2467 PRETEND_BYTES is the size of the registers we need to store. 2468 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra 2469 stack space that the prologue should allocate. 2470 2471 Internally, gcc assumes that the argument pointer is aligned 2472 to STACK_BOUNDARY bits. This is used both for alignment 2473 optimizations (see init_emit) and to locate arguments that are 2474 aligned to more than PARM_BOUNDARY bits. We must preserve this 2475 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to 2476 a stack boundary. */ 2477 2478 /* We assume at most one partial arg, and it must be the first 2479 argument on the stack. */ 2480 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size); 2481 2482 pretend_bytes = partial; 2483 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES); 2484 2485 /* We want to align relative to the actual stack pointer, so 2486 don't include this in the stack size until later. */ 2487 all->extra_pretend_bytes = all->pretend_args_size; 2488 } 2489 } 2490 2491 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs, 2492 entry_parm ? 
data->partial : 0, current_function_decl, 2493 &all->stack_args_size, &data->locate); 2494 2495 /* Update parm_stack_boundary if this parameter is passed in the 2496 stack. */ 2497 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary) 2498 crtl->parm_stack_boundary = data->locate.boundary; 2499 2500 /* Adjust offsets to include the pretend args. */ 2501 pretend_bytes = all->extra_pretend_bytes - pretend_bytes; 2502 data->locate.slot_offset.constant += pretend_bytes; 2503 data->locate.offset.constant += pretend_bytes; 2504 2505 data->entry_parm = entry_parm; 2506 } 2507 2508 /* A subroutine of assign_parms. If there is actually space on the stack 2509 for this parm, count it in stack_args_size and return true. */ 2510 2511 static bool 2512 assign_parm_is_stack_parm (struct assign_parm_data_all *all, 2513 struct assign_parm_data_one *data) 2514 { 2515 /* Trivially true if we've no incoming register. */ 2516 if (data->entry_parm == NULL) 2517 ; 2518 /* Also true if we're partially in registers and partially not, 2519 since we've arranged to drop the entire argument on the stack. */ 2520 else if (data->partial != 0) 2521 ; 2522 /* Also true if the target says that it's passed in both registers 2523 and on the stack. */ 2524 else if (GET_CODE (data->entry_parm) == PARALLEL 2525 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX) 2526 ; 2527 /* Also true if the target says that there's stack allocated for 2528 all register parameters. */ 2529 else if (all->reg_parm_stack_space > 0) 2530 ; 2531 /* Otherwise, no, this parameter has no ABI defined stack slot. */ 2532 else 2533 return false; 2534 2535 all->stack_args_size.constant += data->locate.size.constant; 2536 if (data->locate.size.var) 2537 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var); 2538 2539 return true; 2540 } 2541 2542 /* A subroutine of assign_parms. Given that this parameter is allocated 2543 stack space by the ABI, find it. */ 2544 2545 static void 2546 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data) 2547 { 2548 rtx offset_rtx, stack_parm; 2549 unsigned int align, boundary; 2550 2551 /* If we're passing this arg using a reg, make its stack home the 2552 aligned stack slot. */ 2553 if (data->entry_parm) 2554 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset); 2555 else 2556 offset_rtx = ARGS_SIZE_RTX (data->locate.offset); 2557 2558 stack_parm = crtl->args.internal_arg_pointer; 2559 if (offset_rtx != const0_rtx) 2560 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx); 2561 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm); 2562 2563 if (!data->passed_pointer) 2564 { 2565 set_mem_attributes (stack_parm, parm, 1); 2566 /* set_mem_attributes could set MEM_SIZE to the passed mode's size, 2567 while promoted mode's size is needed. */ 2568 if (data->promoted_mode != BLKmode 2569 && data->promoted_mode != DECL_MODE (parm)) 2570 { 2571 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode)); 2572 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm)) 2573 { 2574 int offset = subreg_lowpart_offset (DECL_MODE (parm), 2575 data->promoted_mode); 2576 if (offset) 2577 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset); 2578 } 2579 } 2580 } 2581 2582 boundary = data->locate.boundary; 2583 align = BITS_PER_UNIT; 2584 2585 /* If we're padding upward, we know that the alignment of the slot 2586 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're 2587 intentionally forcing upward padding. 
Otherwise we have to come 2588 up with a guess at the alignment based on OFFSET_RTX. */ 2589 if (data->locate.where_pad != downward || data->entry_parm) 2590 align = boundary; 2591 else if (CONST_INT_P (offset_rtx)) 2592 { 2593 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary; 2594 align = align & -align; 2595 } 2596 set_mem_align (stack_parm, align); 2597 2598 if (data->entry_parm) 2599 set_reg_attrs_for_parm (data->entry_parm, stack_parm); 2600 2601 data->stack_parm = stack_parm; 2602 } 2603 2604 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's 2605 always valid and contiguous. */ 2606 2607 static void 2608 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data) 2609 { 2610 rtx entry_parm = data->entry_parm; 2611 rtx stack_parm = data->stack_parm; 2612 2613 /* If this parm was passed part in regs and part in memory, pretend it 2614 arrived entirely in memory by pushing the register-part onto the stack. 2615 In the special case of a DImode or DFmode that is split, we could put 2616 it together in a pseudoreg directly, but for now that's not worth 2617 bothering with. */ 2618 if (data->partial != 0) 2619 { 2620 /* Handle calls that pass values in multiple non-contiguous 2621 locations. The Irix 6 ABI has examples of this. */ 2622 if (GET_CODE (entry_parm) == PARALLEL) 2623 emit_group_store (validize_mem (stack_parm), entry_parm, 2624 data->passed_type, 2625 int_size_in_bytes (data->passed_type)); 2626 else 2627 { 2628 gcc_assert (data->partial % UNITS_PER_WORD == 0); 2629 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm), 2630 data->partial / UNITS_PER_WORD); 2631 } 2632 2633 entry_parm = stack_parm; 2634 } 2635 2636 /* If we didn't decide this parm came in a register, by default it came 2637 on the stack. */ 2638 else if (entry_parm == NULL) 2639 entry_parm = stack_parm; 2640 2641 /* When an argument is passed in multiple locations, we can't make use 2642 of this information, but we can save some copying if the whole argument 2643 is passed in a single register. */ 2644 else if (GET_CODE (entry_parm) == PARALLEL 2645 && data->nominal_mode != BLKmode 2646 && data->passed_mode != BLKmode) 2647 { 2648 size_t i, len = XVECLEN (entry_parm, 0); 2649 2650 for (i = 0; i < len; i++) 2651 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX 2652 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0)) 2653 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) 2654 == data->passed_mode) 2655 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0) 2656 { 2657 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0); 2658 break; 2659 } 2660 } 2661 2662 data->entry_parm = entry_parm; 2663 } 2664 2665 /* A subroutine of assign_parms. Reconstitute any values which were 2666 passed in multiple registers and would fit in a single register. */ 2667 2668 static void 2669 assign_parm_remove_parallels (struct assign_parm_data_one *data) 2670 { 2671 rtx entry_parm = data->entry_parm; 2672 2673 /* Convert the PARALLEL to a REG of the same mode as the parallel. 2674 This can be done with register operations rather than on the 2675 stack, even if we will store the reconstituted parameter on the 2676 stack later. 
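For instance, an 8-byte argument that a hypothetical ABI splits across two 4-byte registers arrives as a (parallel ...) of two (reg:SI) pieces; emit_group_store below gathers the pieces into one DImode pseudo.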
*/ 2677 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode) 2678 { 2679 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm)); 2680 emit_group_store (parmreg, entry_parm, data->passed_type, 2681 GET_MODE_SIZE (GET_MODE (entry_parm))); 2682 entry_parm = parmreg; 2683 } 2684 2685 data->entry_parm = entry_parm; 2686 } 2687 2688 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's 2689 always valid and properly aligned. */ 2690 2691 static void 2692 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data) 2693 { 2694 rtx stack_parm = data->stack_parm; 2695 2696 /* If we can't trust the parm stack slot to be aligned enough for its 2697 ultimate type, don't use that slot after entry. We'll make another 2698 stack slot, if we need one. */ 2699 if (stack_parm 2700 && ((STRICT_ALIGNMENT 2701 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)) 2702 || (data->nominal_type 2703 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm) 2704 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY))) 2705 stack_parm = NULL; 2706 2707 /* If parm was passed in memory, and we need to convert it on entry, 2708 don't store it back in that same slot. */ 2709 else if (data->entry_parm == stack_parm 2710 && data->nominal_mode != BLKmode 2711 && data->nominal_mode != data->passed_mode) 2712 stack_parm = NULL; 2713 2714 /* If stack protection is in effect for this function, don't leave any 2715 pointers in their passed stack slots. */ 2716 else if (crtl->stack_protect_guard 2717 && (flag_stack_protect == 2 2718 || data->passed_pointer 2719 || POINTER_TYPE_P (data->nominal_type))) 2720 stack_parm = NULL; 2721 2722 data->stack_parm = stack_parm; 2723 } 2724 2725 /* A subroutine of assign_parms. Return true if the current parameter 2726 should be stored as a BLKmode in the current frame. */ 2727 2728 static bool 2729 assign_parm_setup_block_p (struct assign_parm_data_one *data) 2730 { 2731 if (data->nominal_mode == BLKmode) 2732 return true; 2733 if (GET_MODE (data->entry_parm) == BLKmode) 2734 return true; 2735 2736 #ifdef BLOCK_REG_PADDING 2737 /* Only assign_parm_setup_block knows how to deal with register arguments 2738 that are padded at the least significant end. */ 2739 if (REG_P (data->entry_parm) 2740 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD 2741 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1) 2742 == (BYTES_BIG_ENDIAN ? upward : downward))) 2743 return true; 2744 #endif 2745 2746 return false; 2747 } 2748 2749 /* A subroutine of assign_parms. Arrange for the parameter to be 2750 present and valid in DATA->STACK_RTL. */ 2751 2752 static void 2753 assign_parm_setup_block (struct assign_parm_data_all *all, 2754 tree parm, struct assign_parm_data_one *data) 2755 { 2756 rtx entry_parm = data->entry_parm; 2757 rtx stack_parm = data->stack_parm; 2758 HOST_WIDE_INT size; 2759 HOST_WIDE_INT size_stored; 2760 2761 if (GET_CODE (entry_parm) == PARALLEL) 2762 entry_parm = emit_group_move_into_temps (entry_parm); 2763 2764 size = int_size_in_bytes (data->passed_type); 2765 size_stored = CEIL_ROUND (size, UNITS_PER_WORD); 2766 if (stack_parm == 0) 2767 { 2768 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD); 2769 stack_parm = assign_stack_local (BLKmode, size_stored, 2770 DECL_ALIGN (parm)); 2771 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size) 2772 PUT_MODE (stack_parm, GET_MODE (entry_parm)); 2773 set_mem_attributes (stack_parm, parm, 1); 2774 } 2775 2776 /* If a BLKmode arrives in registers, copy it to a stack slot. 
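(For example, a hypothetical `struct { char c[6]; }' argument arriving in a single 8-byte register is spilled to the word-aligned slot allocated above.)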
Handle 2777 calls that pass values in multiple non-contiguous locations. */ 2778 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL) 2779 { 2780 rtx mem; 2781 2782 /* Note that we will be storing an integral number of words. 2783 So we have to be careful to ensure that we allocate an 2784 integral number of words. We do this above when we call 2785 assign_stack_local if space was not allocated in the argument 2786 list. If it was, this will not work if PARM_BOUNDARY is not 2787 a multiple of BITS_PER_WORD. It isn't clear how to fix this 2788 if it becomes a problem. Exception is when BLKmode arrives 2789 with arguments not conforming to word_mode. */ 2790 2791 if (data->stack_parm == 0) 2792 ; 2793 else if (GET_CODE (entry_parm) == PARALLEL) 2794 ; 2795 else 2796 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD)); 2797 2798 mem = validize_mem (stack_parm); 2799 2800 /* Handle values in multiple non-contiguous locations. */ 2801 if (GET_CODE (entry_parm) == PARALLEL) 2802 { 2803 push_to_sequence2 (all->first_conversion_insn, 2804 all->last_conversion_insn); 2805 emit_group_store (mem, entry_parm, data->passed_type, size); 2806 all->first_conversion_insn = get_insns (); 2807 all->last_conversion_insn = get_last_insn (); 2808 end_sequence (); 2809 } 2810 2811 else if (size == 0) 2812 ; 2813 2814 /* If SIZE is that of a mode no bigger than a word, just use 2815 that mode's store operation. */ 2816 else if (size <= UNITS_PER_WORD) 2817 { 2818 enum machine_mode mode 2819 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0); 2820 2821 if (mode != BLKmode 2822 #ifdef BLOCK_REG_PADDING 2823 && (size == UNITS_PER_WORD 2824 || (BLOCK_REG_PADDING (mode, data->passed_type, 1) 2825 != (BYTES_BIG_ENDIAN ? upward : downward))) 2826 #endif 2827 ) 2828 { 2829 rtx reg; 2830 2831 /* We are really truncating a word_mode value containing 2832 SIZE bytes into a value of mode MODE. If such an 2833 operation requires no actual instructions, we can refer 2834 to the value directly in mode MODE, otherwise we must 2835 start with the register in word_mode and explicitly 2836 convert it. */ 2837 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD)) 2838 reg = gen_rtx_REG (mode, REGNO (entry_parm)); 2839 else 2840 { 2841 reg = gen_rtx_REG (word_mode, REGNO (entry_parm)); 2842 reg = convert_to_mode (mode, copy_to_reg (reg), 1); 2843 } 2844 emit_move_insn (change_address (mem, mode, 0), reg); 2845 } 2846 2847 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN 2848 machine must be aligned to the left before storing 2849 to memory. Note that the previous test doesn't 2850 handle all cases (e.g. SIZE == 3). 
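For instance, with UNITS_PER_WORD == 4 and SIZE == 3 the three significant bytes sit at the least significant end of the register, so the code below shifts them left by (4 - 3) * BITS_PER_UNIT == 8 bits before storing the full word.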
*/ 2851 else if (size != UNITS_PER_WORD 2852 #ifdef BLOCK_REG_PADDING 2853 && (BLOCK_REG_PADDING (mode, data->passed_type, 1) 2854 == downward) 2855 #else 2856 && BYTES_BIG_ENDIAN 2857 #endif 2858 ) 2859 { 2860 rtx tem, x; 2861 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT; 2862 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm)); 2863 2864 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1); 2865 tem = change_address (mem, word_mode, 0); 2866 emit_move_insn (tem, x); 2867 } 2868 else 2869 move_block_from_reg (REGNO (entry_parm), mem, 2870 size_stored / UNITS_PER_WORD); 2871 } 2872 else 2873 move_block_from_reg (REGNO (entry_parm), mem, 2874 size_stored / UNITS_PER_WORD); 2875 } 2876 else if (data->stack_parm == 0) 2877 { 2878 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); 2879 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size), 2880 BLOCK_OP_NORMAL); 2881 all->first_conversion_insn = get_insns (); 2882 all->last_conversion_insn = get_last_insn (); 2883 end_sequence (); 2884 } 2885 2886 data->stack_parm = stack_parm; 2887 SET_DECL_RTL (parm, stack_parm); 2888 } 2889 2890 /* A subroutine of assign_parms. Allocate a pseudo to hold the current 2891 parameter. Get it there. Perform all ABI specified conversions. */ 2892 2893 static void 2894 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, 2895 struct assign_parm_data_one *data) 2896 { 2897 rtx parmreg, validated_mem; 2898 rtx equiv_stack_parm; 2899 enum machine_mode promoted_nominal_mode; 2900 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm)); 2901 bool did_conversion = false; 2902 bool need_conversion, moved; 2903 2904 /* Store the parm in a pseudoregister during the function, but we may 2905 need to do it in a wider mode. Using 2 here makes the result 2906 consistent with promote_decl_mode and thus expand_expr_real_1. */ 2907 promoted_nominal_mode 2908 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp, 2909 TREE_TYPE (current_function_decl), 2); 2910 2911 parmreg = gen_reg_rtx (promoted_nominal_mode); 2912 2913 if (!DECL_ARTIFICIAL (parm)) 2914 mark_user_reg (parmreg); 2915 2916 /* If this was an item that we received a pointer to, 2917 set DECL_RTL appropriately. */ 2918 if (data->passed_pointer) 2919 { 2920 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg); 2921 set_mem_attributes (x, parm, 1); 2922 SET_DECL_RTL (parm, x); 2923 } 2924 else 2925 SET_DECL_RTL (parm, parmreg); 2926 2927 assign_parm_remove_parallels (data); 2928 2929 /* Copy the value into the register, thus bridging between 2930 assign_parm_find_data_types and expand_expr_real_1. */ 2931 2932 equiv_stack_parm = data->stack_parm; 2933 validated_mem = validize_mem (data->entry_parm); 2934 2935 need_conversion = (data->nominal_mode != data->passed_mode 2936 || promoted_nominal_mode != data->promoted_mode); 2937 moved = false; 2938 2939 if (need_conversion 2940 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT 2941 && data->nominal_mode == data->passed_mode 2942 && data->nominal_mode == GET_MODE (data->entry_parm)) 2943 { 2944 /* ENTRY_PARM has been converted to PROMOTED_MODE, its 2945 mode, by the caller. We now have to convert it to 2946 NOMINAL_MODE, if different. However, PARMREG may be in 2947 a different mode than NOMINAL_MODE if it is being stored 2948 promoted. 2949 2950 If ENTRY_PARM is a hard register, it might be in a register 2951 not valid for operating in its mode (e.g., an odd-numbered 2952 register for a DFmode). 
In that case, moves are the only 2953 thing valid, so we can't do a convert from there. This 2954 occurs when the calling sequence allows such misaligned 2955 usages. 2956 2957 In addition, the conversion may involve a call, which could 2958 clobber parameters which haven't been copied to pseudo 2959 registers yet. 2960 2961 First, we try to emit an insn which performs the necessary 2962 conversion. We verify that this insn does not clobber any 2963 hard registers. */ 2964 2965 enum insn_code icode; 2966 rtx op0, op1; 2967 2968 icode = can_extend_p (promoted_nominal_mode, data->passed_mode, 2969 unsignedp); 2970 2971 op0 = parmreg; 2972 op1 = validated_mem; 2973 if (icode != CODE_FOR_nothing 2974 && insn_operand_matches (icode, 0, op0) 2975 && insn_operand_matches (icode, 1, op1)) 2976 { 2977 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND; 2978 rtx insn, insns, t = op1; 2979 HARD_REG_SET hardregs; 2980 2981 start_sequence (); 2982 /* If op1 is a hard register that is likely spilled, first 2983 force it into a pseudo, otherwise combiner might extend 2984 its lifetime too much. */ 2985 if (GET_CODE (t) == SUBREG) 2986 t = SUBREG_REG (t); 2987 if (REG_P (t) 2988 && HARD_REGISTER_P (t) 2989 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t)) 2990 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t)))) 2991 { 2992 t = gen_reg_rtx (GET_MODE (op1)); 2993 emit_move_insn (t, op1); 2994 } 2995 else 2996 t = op1; 2997 insn = gen_extend_insn (op0, t, promoted_nominal_mode, 2998 data->passed_mode, unsignedp); 2999 emit_insn (insn); 3000 insns = get_insns (); 3001 3002 moved = true; 3003 CLEAR_HARD_REG_SET (hardregs); 3004 for (insn = insns; insn && moved; insn = NEXT_INSN (insn)) 3005 { 3006 if (INSN_P (insn)) 3007 note_stores (PATTERN (insn), record_hard_reg_sets, 3008 &hardregs); 3009 if (!hard_reg_set_empty_p (hardregs)) 3010 moved = false; 3011 } 3012 3013 end_sequence (); 3014 3015 if (moved) 3016 { 3017 emit_insn (insns); 3018 if (equiv_stack_parm != NULL_RTX) 3019 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg), 3020 equiv_stack_parm); 3021 } 3022 } 3023 } 3024 3025 if (moved) 3026 /* Nothing to do. */ 3027 ; 3028 else if (need_conversion) 3029 { 3030 /* We did not have an insn to convert directly, or the sequence 3031 generated appeared unsafe. We must first copy the parm to a 3032 pseudo reg, and save the conversion until after all 3033 parameters have been moved. */ 3034 3035 int save_tree_used; 3036 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm)); 3037 3038 emit_move_insn (tempreg, validated_mem); 3039 3040 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); 3041 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp); 3042 3043 if (GET_CODE (tempreg) == SUBREG 3044 && GET_MODE (tempreg) == data->nominal_mode 3045 && REG_P (SUBREG_REG (tempreg)) 3046 && data->nominal_mode == data->passed_mode 3047 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm) 3048 && GET_MODE_SIZE (GET_MODE (tempreg)) 3049 < GET_MODE_SIZE (GET_MODE (data->entry_parm))) 3050 { 3051 /* The argument is already sign/zero extended, so note it 3052 into the subreg. */ 3053 SUBREG_PROMOTED_VAR_P (tempreg) = 1; 3054 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp); 3055 } 3056 3057 /* TREE_USED gets set erroneously during expand_assignment.
*/ 3058 save_tree_used = TREE_USED (parm); 3059 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false); 3060 TREE_USED (parm) = save_tree_used; 3061 all->first_conversion_insn = get_insns (); 3062 all->last_conversion_insn = get_last_insn (); 3063 end_sequence (); 3064 3065 did_conversion = true; 3066 } 3067 else 3068 emit_move_insn (parmreg, validated_mem); 3069 3070 /* If we were passed a pointer but the actual value can safely live 3071 in a register, put it in one. */ 3072 if (data->passed_pointer 3073 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode 3074 /* If by-reference argument was promoted, demote it. */ 3075 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm)) 3076 || use_register_for_decl (parm))) 3077 { 3078 /* We can't use nominal_mode, because it will have been set to 3079 Pmode above. We must use the actual mode of the parm. */ 3080 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm))); 3081 mark_user_reg (parmreg); 3082 3083 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm))) 3084 { 3085 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm))); 3086 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm)); 3087 3088 push_to_sequence2 (all->first_conversion_insn, 3089 all->last_conversion_insn); 3090 emit_move_insn (tempreg, DECL_RTL (parm)); 3091 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p); 3092 emit_move_insn (parmreg, tempreg); 3093 all->first_conversion_insn = get_insns (); 3094 all->last_conversion_insn = get_last_insn (); 3095 end_sequence (); 3096 3097 did_conversion = true; 3098 } 3099 else 3100 emit_move_insn (parmreg, DECL_RTL (parm)); 3101 3102 SET_DECL_RTL (parm, parmreg); 3103 3104 /* STACK_PARM is the pointer, not the parm, and PARMREG is 3105 now the parm. */ 3106 data->stack_parm = NULL; 3107 } 3108 3109 /* Mark the register as eliminable if we did no conversion and it was 3110 copied from memory at a fixed offset, and the arg pointer was not 3111 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the 3112 offset formed an invalid address, such memory-equivalences as we 3113 make here would screw up life analysis for it. */ 3114 if (data->nominal_mode == data->passed_mode 3115 && !did_conversion 3116 && data->stack_parm != 0 3117 && MEM_P (data->stack_parm) 3118 && data->locate.offset.var == 0 3119 && reg_mentioned_p (virtual_incoming_args_rtx, 3120 XEXP (data->stack_parm, 0))) 3121 { 3122 rtx linsn = get_last_insn (); 3123 rtx sinsn, set; 3124 3125 /* Mark complex types separately. */ 3126 if (GET_CODE (parmreg) == CONCAT) 3127 { 3128 enum machine_mode submode 3129 = GET_MODE_INNER (GET_MODE (parmreg)); 3130 int regnor = REGNO (XEXP (parmreg, 0)); 3131 int regnoi = REGNO (XEXP (parmreg, 1)); 3132 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0); 3133 rtx stacki = adjust_address_nv (data->stack_parm, submode, 3134 GET_MODE_SIZE (submode)); 3135 3136 /* Scan backwards for the set of the real and 3137 imaginary parts. */ 3138 for (sinsn = linsn; sinsn != 0; 3139 sinsn = prev_nonnote_insn (sinsn)) 3140 { 3141 set = single_set (sinsn); 3142 if (set == 0) 3143 continue; 3144 3145 if (SET_DEST (set) == regno_reg_rtx [regnoi]) 3146 set_unique_reg_note (sinsn, REG_EQUIV, stacki); 3147 else if (SET_DEST (set) == regno_reg_rtx [regnor]) 3148 set_unique_reg_note (sinsn, REG_EQUIV, stackr); 3149 } 3150 } 3151 else 3152 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg); 3153 } 3154 3155 /* For pointer data type, suggest pointer register. 
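mark_reg_pointer records REG_POINTER and the alignment known for the pointed-to type, which later passes can exploit when forming addresses from this pseudo.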
*/ 3156 if (POINTER_TYPE_P (TREE_TYPE (parm))) 3157 mark_reg_pointer (parmreg, 3158 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))); 3159 } 3160 3161 /* A subroutine of assign_parms. Allocate stack space to hold the current 3162 parameter. Get it there. Perform all ABI specified conversions. */ 3163 3164 static void 3165 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, 3166 struct assign_parm_data_one *data) 3167 { 3168 /* Value must be stored in the stack slot STACK_PARM during function 3169 execution. */ 3170 bool to_conversion = false; 3171 3172 assign_parm_remove_parallels (data); 3173 3174 if (data->promoted_mode != data->nominal_mode) 3175 { 3176 /* Conversion is required. */ 3177 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm)); 3178 3179 emit_move_insn (tempreg, validize_mem (data->entry_parm)); 3180 3181 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); 3182 to_conversion = true; 3183 3184 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg, 3185 TYPE_UNSIGNED (TREE_TYPE (parm))); 3186 3187 if (data->stack_parm) 3188 { 3189 int offset = subreg_lowpart_offset (data->nominal_mode, 3190 GET_MODE (data->stack_parm)); 3191 /* ??? This may need a big-endian conversion on sparc64. */ 3192 data->stack_parm 3193 = adjust_address (data->stack_parm, data->nominal_mode, 0); 3194 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm)) 3195 set_mem_offset (data->stack_parm, 3196 MEM_OFFSET (data->stack_parm) + offset); 3197 } 3198 } 3199 3200 if (data->entry_parm != data->stack_parm) 3201 { 3202 rtx src, dest; 3203 3204 if (data->stack_parm == 0) 3205 { 3206 int align = STACK_SLOT_ALIGNMENT (data->passed_type, 3207 GET_MODE (data->entry_parm), 3208 TYPE_ALIGN (data->passed_type)); 3209 data->stack_parm 3210 = assign_stack_local (GET_MODE (data->entry_parm), 3211 GET_MODE_SIZE (GET_MODE (data->entry_parm)), 3212 align); 3213 set_mem_attributes (data->stack_parm, parm, 1); 3214 } 3215 3216 dest = validize_mem (data->stack_parm); 3217 src = validize_mem (data->entry_parm); 3218 3219 if (MEM_P (src)) 3220 { 3221 /* Use a block move to handle potentially misaligned entry_parm. */ 3222 if (!to_conversion) 3223 push_to_sequence2 (all->first_conversion_insn, 3224 all->last_conversion_insn); 3225 to_conversion = true; 3226 3227 emit_block_move (dest, src, 3228 GEN_INT (int_size_in_bytes (data->passed_type)), 3229 BLOCK_OP_NORMAL); 3230 } 3231 else 3232 emit_move_insn (dest, src); 3233 } 3234 3235 if (to_conversion) 3236 { 3237 all->first_conversion_insn = get_insns (); 3238 all->last_conversion_insn = get_last_insn (); 3239 end_sequence (); 3240 } 3241 3242 SET_DECL_RTL (parm, data->stack_parm); 3243 } 3244 3245 /* A subroutine of assign_parms. If the ABI splits complex arguments, then 3246 undo the frobbing that we did in assign_parms_augmented_arg_list. 
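That is, for each original complex parameter we recombine the DECL_RTLs of the two synthetic halves into a single CONCAT, or into a freshly allocated stack slot when the parameter is addressable.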
*/ 3247 3248 static void 3249 assign_parms_unsplit_complex (struct assign_parm_data_all *all, 3250 VEC(tree, heap) *fnargs) 3251 { 3252 tree parm; 3253 tree orig_fnargs = all->orig_fnargs; 3254 unsigned i = 0; 3255 3256 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i) 3257 { 3258 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE 3259 && targetm.calls.split_complex_arg (TREE_TYPE (parm))) 3260 { 3261 rtx tmp, real, imag; 3262 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm)); 3263 3264 real = DECL_RTL (VEC_index (tree, fnargs, i)); 3265 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1)); 3266 if (inner != GET_MODE (real)) 3267 { 3268 real = gen_lowpart_SUBREG (inner, real); 3269 imag = gen_lowpart_SUBREG (inner, imag); 3270 } 3271 3272 if (TREE_ADDRESSABLE (parm)) 3273 { 3274 rtx rmem, imem; 3275 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm)); 3276 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm), 3277 DECL_MODE (parm), 3278 TYPE_ALIGN (TREE_TYPE (parm))); 3279 3280 /* split_complex_arg put the real and imag parts in 3281 pseudos. Move them to memory. */ 3282 tmp = assign_stack_local (DECL_MODE (parm), size, align); 3283 set_mem_attributes (tmp, parm, 1); 3284 rmem = adjust_address_nv (tmp, inner, 0); 3285 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner)); 3286 push_to_sequence2 (all->first_conversion_insn, 3287 all->last_conversion_insn); 3288 emit_move_insn (rmem, real); 3289 emit_move_insn (imem, imag); 3290 all->first_conversion_insn = get_insns (); 3291 all->last_conversion_insn = get_last_insn (); 3292 end_sequence (); 3293 } 3294 else 3295 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag); 3296 SET_DECL_RTL (parm, tmp); 3297 3298 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i)); 3299 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1)); 3300 if (inner != GET_MODE (real)) 3301 { 3302 real = gen_lowpart_SUBREG (inner, real); 3303 imag = gen_lowpart_SUBREG (inner, imag); 3304 } 3305 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag); 3306 set_decl_incoming_rtl (parm, tmp, false); 3307 i++; 3308 } 3309 } 3310 } 3311 3312 /* Assign RTL expressions to the function's parameters. This may involve 3313 copying them into registers and using those registers as the DECL_RTL. */ 3314 3315 static void 3316 assign_parms (tree fndecl) 3317 { 3318 struct assign_parm_data_all all; 3319 tree parm; 3320 VEC(tree, heap) *fnargs; 3321 unsigned i; 3322 3323 crtl->args.internal_arg_pointer 3324 = targetm.calls.internal_arg_pointer (); 3325 3326 assign_parms_initialize_all (&all); 3327 fnargs = assign_parms_augmented_arg_list (&all); 3328 3329 FOR_EACH_VEC_ELT (tree, fnargs, i, parm) 3330 { 3331 struct assign_parm_data_one data; 3332 3333 /* Extract the type of PARM; adjust it according to ABI. */ 3334 assign_parm_find_data_types (&all, parm, &data); 3335 3336 /* Early out for errors and void parameters. */ 3337 if (data.passed_mode == VOIDmode) 3338 { 3339 SET_DECL_RTL (parm, const0_rtx); 3340 DECL_INCOMING_RTL (parm) = DECL_RTL (parm); 3341 continue; 3342 } 3343 3344 /* Estimate stack alignment from parameter alignment. 
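For example, a parameter of a hypothetical 32-byte vector type can raise crtl->stack_alignment_estimated to 256 bits here, before any of the function body has been expanded.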
*/ 3345 if (SUPPORTS_STACK_ALIGNMENT) 3346 { 3347 unsigned int align 3348 = targetm.calls.function_arg_boundary (data.promoted_mode, 3349 data.passed_type); 3350 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode, 3351 align); 3352 if (TYPE_ALIGN (data.nominal_type) > align) 3353 align = MINIMUM_ALIGNMENT (data.nominal_type, 3354 TYPE_MODE (data.nominal_type), 3355 TYPE_ALIGN (data.nominal_type)); 3356 if (crtl->stack_alignment_estimated < align) 3357 { 3358 gcc_assert (!crtl->stack_realign_processed); 3359 crtl->stack_alignment_estimated = align; 3360 } 3361 } 3362 3363 if (cfun->stdarg && !DECL_CHAIN (parm)) 3364 assign_parms_setup_varargs (&all, &data, false); 3365 3366 /* Find out where the parameter arrives in this function. */ 3367 assign_parm_find_entry_rtl (&all, &data); 3368 3369 /* Find out where stack space for this parameter might be. */ 3370 if (assign_parm_is_stack_parm (&all, &data)) 3371 { 3372 assign_parm_find_stack_rtl (parm, &data); 3373 assign_parm_adjust_entry_rtl (&data); 3374 } 3375 3376 /* Record permanently how this parm was passed. */ 3377 if (data.passed_pointer) 3378 { 3379 rtx incoming_rtl 3380 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)), 3381 data.entry_parm); 3382 set_decl_incoming_rtl (parm, incoming_rtl, true); 3383 } 3384 else 3385 set_decl_incoming_rtl (parm, data.entry_parm, false); 3386 3387 /* Update info on where next arg arrives in registers. */ 3388 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode, 3389 data.passed_type, data.named_arg); 3390 3391 assign_parm_adjust_stack_rtl (&data); 3392 3393 if (assign_parm_setup_block_p (&data)) 3394 assign_parm_setup_block (&all, parm, &data); 3395 else if (data.passed_pointer || use_register_for_decl (parm)) 3396 assign_parm_setup_reg (&all, parm, &data); 3397 else 3398 assign_parm_setup_stack (&all, parm, &data); 3399 } 3400 3401 if (targetm.calls.split_complex_arg) 3402 assign_parms_unsplit_complex (&all, fnargs); 3403 3404 VEC_free (tree, heap, fnargs); 3405 3406 /* Output all parameter conversion instructions (possibly including calls) 3407 now that all parameters have been copied out of hard registers. */ 3408 emit_insn (all.first_conversion_insn); 3409 3410 /* Estimate reload stack alignment from scalar return mode. */ 3411 if (SUPPORTS_STACK_ALIGNMENT) 3412 { 3413 if (DECL_RESULT (fndecl)) 3414 { 3415 tree type = TREE_TYPE (DECL_RESULT (fndecl)); 3416 enum machine_mode mode = TYPE_MODE (type); 3417 3418 if (mode != BLKmode 3419 && mode != VOIDmode 3420 && !AGGREGATE_TYPE_P (type)) 3421 { 3422 unsigned int align = GET_MODE_ALIGNMENT (mode); 3423 if (crtl->stack_alignment_estimated < align) 3424 { 3425 gcc_assert (!crtl->stack_realign_processed); 3426 crtl->stack_alignment_estimated = align; 3427 } 3428 } 3429 } 3430 } 3431 3432 /* If we are receiving a struct value address as the first argument, set up 3433 the RTL for the function result. As this might require code to convert 3434 the transmitted address to Pmode, we do this here to ensure that possible 3435 preliminary conversions of the address have been emitted already. 
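The result decl will then read through the hidden `.result_ptr' argument via its DECL_VALUE_EXPR, so a function returning a large struct stores its result through that pointer.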
*/ 3436 if (all.function_result_decl) 3437 { 3438 tree result = DECL_RESULT (current_function_decl); 3439 rtx addr = DECL_RTL (all.function_result_decl); 3440 rtx x; 3441 3442 if (DECL_BY_REFERENCE (result)) 3443 { 3444 SET_DECL_VALUE_EXPR (result, all.function_result_decl); 3445 x = addr; 3446 } 3447 else 3448 { 3449 SET_DECL_VALUE_EXPR (result, 3450 build1 (INDIRECT_REF, TREE_TYPE (result), 3451 all.function_result_decl)); 3452 addr = convert_memory_address (Pmode, addr); 3453 x = gen_rtx_MEM (DECL_MODE (result), addr); 3454 set_mem_attributes (x, result, 1); 3455 } 3456 3457 DECL_HAS_VALUE_EXPR_P (result) = 1; 3458 3459 SET_DECL_RTL (result, x); 3460 } 3461 3462 /* We have aligned all the args, so add space for the pretend args. */ 3463 crtl->args.pretend_args_size = all.pretend_args_size; 3464 all.stack_args_size.constant += all.extra_pretend_bytes; 3465 crtl->args.size = all.stack_args_size.constant; 3466 3467 /* Adjust function incoming argument size for alignment and 3468 minimum length. */ 3469 3470 #ifdef REG_PARM_STACK_SPACE 3471 crtl->args.size = MAX (crtl->args.size, 3472 REG_PARM_STACK_SPACE (fndecl)); 3473 #endif 3474 3475 crtl->args.size = CEIL_ROUND (crtl->args.size, 3476 PARM_BOUNDARY / BITS_PER_UNIT); 3477 3478 #ifdef ARGS_GROW_DOWNWARD 3479 crtl->args.arg_offset_rtx 3480 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant) 3481 : expand_expr (size_diffop (all.stack_args_size.var, 3482 size_int (-all.stack_args_size.constant)), 3483 NULL_RTX, VOIDmode, EXPAND_NORMAL)); 3484 #else 3485 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size); 3486 #endif 3487 3488 /* See how many bytes, if any, of its args a function should try to pop 3489 on return. */ 3490 3491 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl, 3492 TREE_TYPE (fndecl), 3493 crtl->args.size); 3494 3495 /* For stdarg.h function, save info about 3496 regs and stack space used by the named args. */ 3497 3498 crtl->args.info = all.args_so_far_v; 3499 3500 /* Set the rtx used for the function return value. Put this in its 3501 own variable so any optimizers that need this information don't have 3502 to include tree.h. Do this here so it gets done when an inlined 3503 function gets output. */ 3504 3505 crtl->return_rtx 3506 = (DECL_RTL_SET_P (DECL_RESULT (fndecl)) 3507 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX); 3508 3509 /* If scalar return value was computed in a pseudo-reg, or was a named 3510 return value that got dumped to the stack, copy that to the hard 3511 return register. */ 3512 if (DECL_RTL_SET_P (DECL_RESULT (fndecl))) 3513 { 3514 tree decl_result = DECL_RESULT (fndecl); 3515 rtx decl_rtl = DECL_RTL (decl_result); 3516 3517 if (REG_P (decl_rtl) 3518 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER 3519 : DECL_REGISTER (decl_result)) 3520 { 3521 rtx real_decl_rtl; 3522 3523 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result), 3524 fndecl, true); 3525 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1; 3526 /* The delay slot scheduler assumes that crtl->return_rtx 3527 holds the hard register containing the return value, not a 3528 temporary pseudo. */ 3529 crtl->return_rtx = real_decl_rtl; 3530 } 3531 } 3532 } 3533 3534 /* A subroutine of gimplify_parameters, invoked via walk_tree. 3535 For all seen types, gimplify their sizes. 
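This matters for variable-sized types: given a hypothetical `void f (int n, char buf[][n])', BUF decays to a pointer to a variable-sized array whose size expression in N must be gimplified here.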
*/ 3536 3537 static tree 3538 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data) 3539 { 3540 tree t = *tp; 3541 3542 *walk_subtrees = 0; 3543 if (TYPE_P (t)) 3544 { 3545 if (POINTER_TYPE_P (t)) 3546 *walk_subtrees = 1; 3547 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t)) 3548 && !TYPE_SIZES_GIMPLIFIED (t)) 3549 { 3550 gimplify_type_sizes (t, (gimple_seq *) data); 3551 *walk_subtrees = 1; 3552 } 3553 } 3554 3555 return NULL; 3556 } 3557 3558 /* Gimplify the parameter list for current_function_decl. This involves 3559 evaluating SAVE_EXPRs of variable sized parameters and generating code 3560 to implement callee-copies reference parameters. Returns a sequence of 3561 statements to add to the beginning of the function. */ 3562 3563 gimple_seq 3564 gimplify_parameters (void) 3565 { 3566 struct assign_parm_data_all all; 3567 tree parm; 3568 gimple_seq stmts = NULL; 3569 VEC(tree, heap) *fnargs; 3570 unsigned i; 3571 3572 assign_parms_initialize_all (&all); 3573 fnargs = assign_parms_augmented_arg_list (&all); 3574 3575 FOR_EACH_VEC_ELT (tree, fnargs, i, parm) 3576 { 3577 struct assign_parm_data_one data; 3578 3579 /* Extract the type of PARM; adjust it according to ABI. */ 3580 assign_parm_find_data_types (&all, parm, &data); 3581 3582 /* Early out for errors and void parameters. */ 3583 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL) 3584 continue; 3585 3586 /* Update info on where next arg arrives in registers. */ 3587 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode, 3588 data.passed_type, data.named_arg); 3589 3590 /* ??? Once upon a time variable_size stuffed parameter list 3591 SAVE_EXPRs (amongst others) onto a pending sizes list. This 3592 turned out to be less than manageable in the gimple world. 3593 Now we have to hunt them down ourselves. */ 3594 walk_tree_without_duplicates (&data.passed_type, 3595 gimplify_parm_type, &stmts); 3596 3597 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST) 3598 { 3599 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts); 3600 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts); 3601 } 3602 3603 if (data.passed_pointer) 3604 { 3605 tree type = TREE_TYPE (data.passed_type); 3606 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type), 3607 type, data.named_arg)) 3608 { 3609 tree local, t; 3610 3611 /* For constant-sized objects, this is trivial; for 3612 variable-sized objects, we have to play games. */ 3613 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST 3614 && !(flag_stack_check == GENERIC_STACK_CHECK 3615 && compare_tree_int (DECL_SIZE_UNIT (parm), 3616 STACK_CHECK_MAX_VAR_SIZE) > 0)) 3617 { 3618 local = create_tmp_var (type, get_name (parm)); 3619 DECL_IGNORED_P (local) = 0; 3620 /* If PARM was addressable, move that flag over 3621 to the local copy, as its address will be taken, 3622 not the PARMs. Keep the parms address taken 3623 as we'll query that flag during gimplification. 
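A copy that was not addressable and has complex or vector type can instead be treated as a gimple register, as done below.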
*/ 3624 if (TREE_ADDRESSABLE (parm)) 3625 TREE_ADDRESSABLE (local) = 1; 3626 else if (TREE_CODE (type) == COMPLEX_TYPE 3627 || TREE_CODE (type) == VECTOR_TYPE) 3628 DECL_GIMPLE_REG_P (local) = 1; 3629 } 3630 else 3631 { 3632 tree ptr_type, addr; 3633 3634 ptr_type = build_pointer_type (type); 3635 addr = create_tmp_reg (ptr_type, get_name (parm)); 3636 DECL_IGNORED_P (addr) = 0; 3637 local = build_fold_indirect_ref (addr); 3638 3639 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN); 3640 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm), 3641 size_int (DECL_ALIGN (parm))); 3642 3643 /* The call has been built for a variable-sized object. */ 3644 CALL_ALLOCA_FOR_VAR_P (t) = 1; 3645 t = fold_convert (ptr_type, t); 3646 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t); 3647 gimplify_and_add (t, &stmts); 3648 } 3649 3650 gimplify_assign (local, parm, &stmts); 3651 3652 SET_DECL_VALUE_EXPR (parm, local); 3653 DECL_HAS_VALUE_EXPR_P (parm) = 1; 3654 } 3655 } 3656 } 3657 3658 VEC_free (tree, heap, fnargs); 3659 3660 return stmts; 3661 } 3662 3663 /* Compute the size and offset from the start of the stacked arguments for a 3664 parm passed in mode PASSED_MODE and with type TYPE. 3665 3666 INITIAL_OFFSET_PTR points to the current offset into the stacked 3667 arguments. 3668 3669 The starting offset and size for this parm are returned in 3670 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is 3671 nonzero, the offset is that of stack slot, which is returned in 3672 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of 3673 padding required from the initial offset ptr to the stack slot. 3674 3675 IN_REGS is nonzero if the argument will be passed in registers. It will 3676 never be set if REG_PARM_STACK_SPACE is not defined. 3677 3678 FNDECL is the function in which the argument was defined. 3679 3680 There are two types of rounding that are done. The first, controlled by 3681 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the 3682 argument list to be aligned to the specific boundary (in bits). This 3683 rounding affects the initial and starting offsets, but not the argument 3684 size. 3685 3686 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY, 3687 optionally rounds the size of the parm to PARM_BOUNDARY. The 3688 initial offset is not affected by this rounding, while the size always 3689 is and the starting offset may be. */ 3690 3691 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case; 3692 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's 3693 callers pass in the total size of args so far as 3694 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */ 3695 3696 void 3697 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs, 3698 int partial, tree fndecl ATTRIBUTE_UNUSED, 3699 struct args_size *initial_offset_ptr, 3700 struct locate_and_pad_arg_data *locate) 3701 { 3702 tree sizetree; 3703 enum direction where_pad; 3704 unsigned int boundary, round_boundary; 3705 int reg_parm_stack_space = 0; 3706 int part_size_in_regs; 3707 3708 #ifdef REG_PARM_STACK_SPACE 3709 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl); 3710 3711 /* If we have found a stack parm before we reach the end of the 3712 area reserved for registers, skip that area. */ 3713 if (! 
in_regs) 3714 { 3715 if (reg_parm_stack_space > 0) 3716 { 3717 if (initial_offset_ptr->var) 3718 { 3719 initial_offset_ptr->var 3720 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), 3721 ssize_int (reg_parm_stack_space)); 3722 initial_offset_ptr->constant = 0; 3723 } 3724 else if (initial_offset_ptr->constant < reg_parm_stack_space) 3725 initial_offset_ptr->constant = reg_parm_stack_space; 3726 } 3727 } 3728 #endif /* REG_PARM_STACK_SPACE */ 3729 3730 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0); 3731 3732 sizetree 3733 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode)); 3734 where_pad = FUNCTION_ARG_PADDING (passed_mode, type); 3735 boundary = targetm.calls.function_arg_boundary (passed_mode, type); 3736 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode, 3737 type); 3738 locate->where_pad = where_pad; 3739 3740 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */ 3741 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT) 3742 boundary = MAX_SUPPORTED_STACK_ALIGNMENT; 3743 3744 locate->boundary = boundary; 3745 3746 if (SUPPORTS_STACK_ALIGNMENT) 3747 { 3748 /* stack_alignment_estimated can't change after stack has been 3749 realigned. */ 3750 if (crtl->stack_alignment_estimated < boundary) 3751 { 3752 if (!crtl->stack_realign_processed) 3753 crtl->stack_alignment_estimated = boundary; 3754 else 3755 { 3756 /* If stack is realigned and stack alignment value 3757 hasn't been finalized, it is OK not to increase 3758 stack_alignment_estimated. The bigger alignment 3759 requirement is recorded in stack_alignment_needed 3760 below. */ 3761 gcc_assert (!crtl->stack_realign_finalized 3762 && crtl->stack_realign_needed); 3763 } 3764 } 3765 } 3766 3767 /* Remember if the outgoing parameter requires extra alignment on the 3768 calling function side. */ 3769 if (crtl->stack_alignment_needed < boundary) 3770 crtl->stack_alignment_needed = boundary; 3771 if (crtl->preferred_stack_boundary < boundary) 3772 crtl->preferred_stack_boundary = boundary; 3773 3774 #ifdef ARGS_GROW_DOWNWARD 3775 locate->slot_offset.constant = -initial_offset_ptr->constant; 3776 if (initial_offset_ptr->var) 3777 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0), 3778 initial_offset_ptr->var); 3779 3780 { 3781 tree s2 = sizetree; 3782 if (where_pad != none 3783 && (!host_integerp (sizetree, 1) 3784 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary)) 3785 s2 = round_up (s2, round_boundary / BITS_PER_UNIT); 3786 SUB_PARM_SIZE (locate->slot_offset, s2); 3787 } 3788 3789 locate->slot_offset.constant += part_size_in_regs; 3790 3791 if (!in_regs 3792 #ifdef REG_PARM_STACK_SPACE 3793 || REG_PARM_STACK_SPACE (fndecl) > 0 3794 #endif 3795 ) 3796 pad_to_arg_alignment (&locate->slot_offset, boundary, 3797 &locate->alignment_pad); 3798 3799 locate->size.constant = (-initial_offset_ptr->constant 3800 - locate->slot_offset.constant); 3801 if (initial_offset_ptr->var) 3802 locate->size.var = size_binop (MINUS_EXPR, 3803 size_binop (MINUS_EXPR, 3804 ssize_int (0), 3805 initial_offset_ptr->var), 3806 locate->slot_offset.var); 3807 3808 /* Pad_below needs the pre-rounded size to know how much to pad 3809 below. 
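     A worked instance (added): with PARM_BOUNDARY == 32, a 1-byte
     QImode argument occupies a 4-byte slot, and pad_below moves the
     offset by (32 / 8) - 1 = 3 bytes so the value sits at the padded
     end of its slot.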
*/ 3810 locate->offset = locate->slot_offset; 3811 if (where_pad == downward) 3812 pad_below (&locate->offset, passed_mode, sizetree); 3813 3814 #else /* !ARGS_GROW_DOWNWARD */ 3815 if (!in_regs 3816 #ifdef REG_PARM_STACK_SPACE 3817 || REG_PARM_STACK_SPACE (fndecl) > 0 3818 #endif 3819 ) 3820 pad_to_arg_alignment (initial_offset_ptr, boundary, 3821 &locate->alignment_pad); 3822 locate->slot_offset = *initial_offset_ptr; 3823 3824 #ifdef PUSH_ROUNDING 3825 if (passed_mode != BLKmode) 3826 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree))); 3827 #endif 3828 3829 /* Pad_below needs the pre-rounded size to know how much to pad below 3830 so this must be done before rounding up. */ 3831 locate->offset = locate->slot_offset; 3832 if (where_pad == downward) 3833 pad_below (&locate->offset, passed_mode, sizetree); 3834 3835 if (where_pad != none 3836 && (!host_integerp (sizetree, 1) 3837 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary)) 3838 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT); 3839 3840 ADD_PARM_SIZE (locate->size, sizetree); 3841 3842 locate->size.constant -= part_size_in_regs; 3843 #endif /* ARGS_GROW_DOWNWARD */ 3844 3845 #ifdef FUNCTION_ARG_OFFSET 3846 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type); 3847 #endif 3848 } 3849 3850 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY. 3851 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */ 3852 3853 static void 3854 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, 3855 struct args_size *alignment_pad) 3856 { 3857 tree save_var = NULL_TREE; 3858 HOST_WIDE_INT save_constant = 0; 3859 int boundary_in_bytes = boundary / BITS_PER_UNIT; 3860 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET; 3861 3862 #ifdef SPARC_STACK_BOUNDARY_HACK 3863 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than 3864 the real alignment of %sp. However, when it does this, the 3865 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ 3866 if (SPARC_STACK_BOUNDARY_HACK) 3867 sp_offset = 0; 3868 #endif 3869 3870 if (boundary > PARM_BOUNDARY) 3871 { 3872 save_var = offset_ptr->var; 3873 save_constant = offset_ptr->constant; 3874 } 3875 3876 alignment_pad->var = NULL_TREE; 3877 alignment_pad->constant = 0; 3878 3879 if (boundary > BITS_PER_UNIT) 3880 { 3881 if (offset_ptr->var) 3882 { 3883 tree sp_offset_tree = ssize_int (sp_offset); 3884 tree offset = size_binop (PLUS_EXPR, 3885 ARGS_SIZE_TREE (*offset_ptr), 3886 sp_offset_tree); 3887 #ifdef ARGS_GROW_DOWNWARD 3888 tree rounded = round_down (offset, boundary / BITS_PER_UNIT); 3889 #else 3890 tree rounded = round_up (offset, boundary / BITS_PER_UNIT); 3891 #endif 3892 3893 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree); 3894 /* ARGS_SIZE_TREE includes constant term. 
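     As a worked example of the constant case below (added): with
     boundary_in_bytes == 16, sp_offset == 4 and an incoming constant
     offset of 9, the upward-growing case computes
     -4 + CEIL_ROUND (9 + 4, 16) == 12, so offset + sp_offset == 16,
     a multiple of the boundary.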
*/ 3895 offset_ptr->constant = 0;
3896 if (boundary > PARM_BOUNDARY)
3897 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3898 save_var);
3899 }
3900 else
3901 {
3902 offset_ptr->constant = -sp_offset +
3903 #ifdef ARGS_GROW_DOWNWARD
3904 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3905 #else
3906 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3907 #endif
3908 if (boundary > PARM_BOUNDARY)
3909 alignment_pad->constant = offset_ptr->constant - save_constant;
3910 }
3911 }
3912 }
3913
3914 static void
3915 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3916 {
3917 if (passed_mode != BLKmode)
3918 {
3919 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3920 offset_ptr->constant
3921 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3922 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3923 - GET_MODE_SIZE (passed_mode));
3924 }
3925 else
3926 {
3927 if (TREE_CODE (sizetree) != INTEGER_CST
3928 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3929 {
3930 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3931 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3932 /* Add it in. */
3933 ADD_PARM_SIZE (*offset_ptr, s2);
3934 SUB_PARM_SIZE (*offset_ptr, sizetree);
3935 }
3936 }
3937 }
3938
3939
3940 /* True if register REGNO was alive at a place where `setjmp' was
3941 called and was set more than once or is an argument. Such regs may
3942 be clobbered by `longjmp'. */
3943
3944 static bool
3945 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3946 {
3947 /* There appear to be cases where some local vars never reach the
3948 backend but have bogus regnos. */
3949 if (regno >= max_reg_num ())
3950 return false;
3951
3952 return ((REG_N_SETS (regno) > 1
3953 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3954 && REGNO_REG_SET_P (setjmp_crosses, regno));
3955 }
3956
3957 /* Walk the tree of blocks describing the binding levels within a
3958 function and warn about variables that might be killed by setjmp or
3959 vfork. This is done after flow analysis and before register
3960 allocation, since register allocation replaces the pseudo-regs with
3961 hard regs. */
3962
3963 static void
3964 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3965 {
3966 tree decl, sub;
3967
3968 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3969 {
3970 if (TREE_CODE (decl) == VAR_DECL
3971 && DECL_RTL_SET_P (decl)
3972 && REG_P (DECL_RTL (decl))
3973 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3974 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3975 " %<longjmp%> or %<vfork%>", decl);
3976 }
3977
3978 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3979 setjmp_vars_warning (setjmp_crosses, sub);
3980 }
3981
3982 /* Do the appropriate part of setjmp_vars_warning
3983 but for arguments instead of local variables. */
3984
3985 static void
3986 setjmp_args_warning (bitmap setjmp_crosses)
3987 {
3988 tree decl;
3989 for (decl = DECL_ARGUMENTS (current_function_decl);
3990 decl; decl = DECL_CHAIN (decl))
3991 if (DECL_RTL (decl) != 0
3992 && REG_P (DECL_RTL (decl))
3993 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3994 warning (OPT_Wclobbered,
3995 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3996 decl);
3997 }
3998
3999 /* Generate warning messages for variables live across setjmp.
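*/

/* Added illustration, not part of the original source: the user-level
   pattern these warnings target.  env and might_longjmp are hypothetical.  */
#if 0
#include <setjmp.h>
extern jmp_buf env;
extern void might_longjmp (void);

int
example (void)
{
  int v = 1;			/* lives in a pseudo-reg */
  if (setjmp (env) == 0)
    {
      v = 2;			/* set again after setjmp: REG_N_SETS > 1 */
      might_longjmp ();
    }
  return v;			/* indeterminate after a longjmp if V was
				   held in a register */
}
#endif
/* If V is carried in a hard register across the setjmp, longjmp may restore
   the register's old contents; the code below reports such variables and
   arguments.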
*/ 4000 4001 void 4002 generate_setjmp_warnings (void) 4003 { 4004 bitmap setjmp_crosses = regstat_get_setjmp_crosses (); 4005 4006 if (n_basic_blocks == NUM_FIXED_BLOCKS 4007 || bitmap_empty_p (setjmp_crosses)) 4008 return; 4009 4010 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl)); 4011 setjmp_args_warning (setjmp_crosses); 4012 } 4013 4014 4015 /* Reverse the order of elements in the fragment chain T of blocks, 4016 and return the new head of the chain (old last element). */ 4017 4018 static tree 4019 block_fragments_nreverse (tree t) 4020 { 4021 tree prev = 0, block, next; 4022 for (block = t; block; block = next) 4023 { 4024 next = BLOCK_FRAGMENT_CHAIN (block); 4025 BLOCK_FRAGMENT_CHAIN (block) = prev; 4026 prev = block; 4027 } 4028 return prev; 4029 } 4030 4031 /* Reverse the order of elements in the chain T of blocks, 4032 and return the new head of the chain (old last element). 4033 Also do the same on subblocks and reverse the order of elements 4034 in BLOCK_FRAGMENT_CHAIN as well. */ 4035 4036 static tree 4037 blocks_nreverse_all (tree t) 4038 { 4039 tree prev = 0, block, next; 4040 for (block = t; block; block = next) 4041 { 4042 next = BLOCK_CHAIN (block); 4043 BLOCK_CHAIN (block) = prev; 4044 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block)); 4045 if (BLOCK_FRAGMENT_CHAIN (block) 4046 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE) 4047 BLOCK_FRAGMENT_CHAIN (block) 4048 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block)); 4049 prev = block; 4050 } 4051 return prev; 4052 } 4053 4054 4055 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END}, 4056 and create duplicate blocks. */ 4057 /* ??? Need an option to either create block fragments or to create 4058 abstract origin duplicates of a source block. It really depends 4059 on what optimization has been performed. */ 4060 4061 void 4062 reorder_blocks (void) 4063 { 4064 tree block = DECL_INITIAL (current_function_decl); 4065 VEC(tree,heap) *block_stack; 4066 4067 if (block == NULL_TREE) 4068 return; 4069 4070 block_stack = VEC_alloc (tree, heap, 10); 4071 4072 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */ 4073 clear_block_marks (block); 4074 4075 /* Prune the old trees away, so that they don't get in the way. */ 4076 BLOCK_SUBBLOCKS (block) = NULL_TREE; 4077 BLOCK_CHAIN (block) = NULL_TREE; 4078 4079 /* Recreate the block tree from the note nesting. */ 4080 reorder_blocks_1 (get_insns (), block, &block_stack); 4081 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block)); 4082 4083 VEC_free (tree, heap, block_stack); 4084 } 4085 4086 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */ 4087 4088 void 4089 clear_block_marks (tree block) 4090 { 4091 while (block) 4092 { 4093 TREE_ASM_WRITTEN (block) = 0; 4094 clear_block_marks (BLOCK_SUBBLOCKS (block)); 4095 block = BLOCK_CHAIN (block); 4096 } 4097 } 4098 4099 static void 4100 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack) 4101 { 4102 rtx insn; 4103 4104 for (insn = insns; insn; insn = NEXT_INSN (insn)) 4105 { 4106 if (NOTE_P (insn)) 4107 { 4108 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG) 4109 { 4110 tree block = NOTE_BLOCK (insn); 4111 tree origin; 4112 4113 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE); 4114 origin = block; 4115 4116 /* If we have seen this block before, that means it now 4117 spans multiple address regions. Create a new fragment. 
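   (Added note: fragments are spliced in at the
		   head of the origin's chain, so a chain ORIGIN -> F1
		   becomes ORIGIN -> NEW -> F1; blocks_nreverse_all later
		   restores source order via block_fragments_nreverse.)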
*/ 4118 if (TREE_ASM_WRITTEN (block)) 4119 { 4120 tree new_block = copy_node (block); 4121 4122 BLOCK_FRAGMENT_ORIGIN (new_block) = origin; 4123 BLOCK_FRAGMENT_CHAIN (new_block) 4124 = BLOCK_FRAGMENT_CHAIN (origin); 4125 BLOCK_FRAGMENT_CHAIN (origin) = new_block; 4126 4127 NOTE_BLOCK (insn) = new_block; 4128 block = new_block; 4129 } 4130 4131 BLOCK_SUBBLOCKS (block) = 0; 4132 TREE_ASM_WRITTEN (block) = 1; 4133 /* When there's only one block for the entire function, 4134 current_block == block and we mustn't do this, it 4135 will cause infinite recursion. */ 4136 if (block != current_block) 4137 { 4138 if (block != origin) 4139 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block); 4140 4141 BLOCK_SUPERCONTEXT (block) = current_block; 4142 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block); 4143 BLOCK_SUBBLOCKS (current_block) = block; 4144 current_block = origin; 4145 } 4146 VEC_safe_push (tree, heap, *p_block_stack, block); 4147 } 4148 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END) 4149 { 4150 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack); 4151 current_block = BLOCK_SUPERCONTEXT (current_block); 4152 } 4153 } 4154 } 4155 } 4156 4157 /* Reverse the order of elements in the chain T of blocks, 4158 and return the new head of the chain (old last element). */ 4159 4160 tree 4161 blocks_nreverse (tree t) 4162 { 4163 tree prev = 0, block, next; 4164 for (block = t; block; block = next) 4165 { 4166 next = BLOCK_CHAIN (block); 4167 BLOCK_CHAIN (block) = prev; 4168 prev = block; 4169 } 4170 return prev; 4171 } 4172 4173 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN) 4174 by modifying the last node in chain 1 to point to chain 2. */ 4175 4176 tree 4177 block_chainon (tree op1, tree op2) 4178 { 4179 tree t1; 4180 4181 if (!op1) 4182 return op2; 4183 if (!op2) 4184 return op1; 4185 4186 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1)) 4187 continue; 4188 BLOCK_CHAIN (t1) = op2; 4189 4190 #ifdef ENABLE_TREE_CHECKING 4191 { 4192 tree t2; 4193 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2)) 4194 gcc_assert (t2 != t1); 4195 } 4196 #endif 4197 4198 return op1; 4199 } 4200 4201 /* Count the subblocks of the list starting with BLOCK. If VECTOR is 4202 non-NULL, list them all into VECTOR, in a depth-first preorder 4203 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all 4204 blocks. */ 4205 4206 static int 4207 all_blocks (tree block, tree *vector) 4208 { 4209 int n_blocks = 0; 4210 4211 while (block) 4212 { 4213 TREE_ASM_WRITTEN (block) = 0; 4214 4215 /* Record this block. */ 4216 if (vector) 4217 vector[n_blocks] = block; 4218 4219 ++n_blocks; 4220 4221 /* Record the subblocks, and their subblocks... */ 4222 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block), 4223 vector ? vector + n_blocks : 0); 4224 block = BLOCK_CHAIN (block); 4225 } 4226 4227 return n_blocks; 4228 } 4229 4230 /* Return a vector containing all the blocks rooted at BLOCK. The 4231 number of elements in the vector is stored in N_BLOCKS_P. The 4232 vector is dynamically allocated; it is the caller's responsibility 4233 to call `free' on the pointer returned. */ 4234 4235 static tree * 4236 get_block_vector (tree block, int *n_blocks_p) 4237 { 4238 tree *block_vector; 4239 4240 *n_blocks_p = all_blocks (block, NULL); 4241 block_vector = XNEWVEC (tree, *n_blocks_p); 4242 all_blocks (block, block_vector); 4243 4244 return block_vector; 4245 } 4246 4247 static GTY(()) int next_block_index = 2; 4248 4249 /* Set BLOCK_NUMBER for all the blocks in FN. 
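*/

/* Added sketch, not part of the original source: the numbering below
   visits blocks in the same depth-first preorder as all_blocks.  Modeled
   here on a hypothetical "struct blk" stand-in for tree BLOCKs.  */
#if 0
struct blk { struct blk *subblocks, *chain; int number; };

static int
number_preorder (struct blk *b, int next)
{
  for (; b; b = b->chain)
    {
      b->number = next++;	/* record this block first... */
      next = number_preorder (b->subblocks, next);	/* ...then its
							   subblocks */
    }
  return next;
}
#endif
/* number_blocks below applies the same order to the real BLOCK tree.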
*/ 4250 4251 void 4252 number_blocks (tree fn) 4253 { 4254 int i; 4255 int n_blocks; 4256 tree *block_vector; 4257 4258 /* For SDB and XCOFF debugging output, we start numbering the blocks 4259 from 1 within each function, rather than keeping a running 4260 count. */ 4261 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO) 4262 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG) 4263 next_block_index = 1; 4264 #endif 4265 4266 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks); 4267 4268 /* The top-level BLOCK isn't numbered at all. */ 4269 for (i = 1; i < n_blocks; ++i) 4270 /* We number the blocks from two. */ 4271 BLOCK_NUMBER (block_vector[i]) = next_block_index++; 4272 4273 free (block_vector); 4274 4275 return; 4276 } 4277 4278 /* If VAR is present in a subblock of BLOCK, return the subblock. */ 4279 4280 DEBUG_FUNCTION tree 4281 debug_find_var_in_block_tree (tree var, tree block) 4282 { 4283 tree t; 4284 4285 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t)) 4286 if (t == var) 4287 return block; 4288 4289 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t)) 4290 { 4291 tree ret = debug_find_var_in_block_tree (var, t); 4292 if (ret) 4293 return ret; 4294 } 4295 4296 return NULL_TREE; 4297 } 4298 4299 /* Keep track of whether we're in a dummy function context. If we are, 4300 we don't want to invoke the set_current_function hook, because we'll 4301 get into trouble if the hook calls target_reinit () recursively or 4302 when the initial initialization is not yet complete. */ 4303 4304 static bool in_dummy_function; 4305 4306 /* Invoke the target hook when setting cfun. Update the optimization options 4307 if the function uses different options than the default. */ 4308 4309 static void 4310 invoke_set_current_function_hook (tree fndecl) 4311 { 4312 if (!in_dummy_function) 4313 { 4314 tree opts = ((fndecl) 4315 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) 4316 : optimization_default_node); 4317 4318 if (!opts) 4319 opts = optimization_default_node; 4320 4321 /* Change optimization options if needed. */ 4322 if (optimization_current_node != opts) 4323 { 4324 optimization_current_node = opts; 4325 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts)); 4326 } 4327 4328 targetm.set_current_function (fndecl); 4329 } 4330 } 4331 4332 /* cfun should never be set directly; use this function. */ 4333 4334 void 4335 set_cfun (struct function *new_cfun) 4336 { 4337 if (cfun != new_cfun) 4338 { 4339 cfun = new_cfun; 4340 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE); 4341 } 4342 } 4343 4344 /* Initialized with NOGC, making this poisonous to the garbage collector. */ 4345 4346 static VEC(function_p,heap) *cfun_stack; 4347 4348 /* Push the current cfun onto the stack, and set cfun to new_cfun. */ 4349 4350 void 4351 push_cfun (struct function *new_cfun) 4352 { 4353 VEC_safe_push (function_p, heap, cfun_stack, cfun); 4354 set_cfun (new_cfun); 4355 } 4356 4357 /* Pop cfun from the stack. */ 4358 4359 void 4360 pop_cfun (void) 4361 { 4362 struct function *new_cfun = VEC_pop (function_p, cfun_stack); 4363 set_cfun (new_cfun); 4364 } 4365 4366 /* Return value of funcdef and increase it. */ 4367 int 4368 get_next_funcdef_no (void) 4369 { 4370 return funcdef_no++; 4371 } 4372 4373 /* Return value of funcdef. */ 4374 int 4375 get_last_funcdef_no (void) 4376 { 4377 return funcdef_no; 4378 } 4379 4380 /* Allocate a function structure for FNDECL and set its contents 4381 to the defaults. 
Set cfun to the newly-allocated object. 4382 Some of the helper functions invoked during initialization assume 4383 that cfun has already been set. Therefore, assign the new object 4384 directly into cfun and invoke the back end hook explicitly at the 4385 very end, rather than initializing a temporary and calling set_cfun 4386 on it. 4387 4388 ABSTRACT_P is true if this is a function that will never be seen by 4389 the middle-end. Such functions are front-end concepts (like C++ 4390 function templates) that do not correspond directly to functions 4391 placed in object files. */ 4392 4393 void 4394 allocate_struct_function (tree fndecl, bool abstract_p) 4395 { 4396 tree result; 4397 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE; 4398 4399 cfun = ggc_alloc_cleared_function (); 4400 4401 init_eh_for_function (); 4402 4403 if (init_machine_status) 4404 cfun->machine = (*init_machine_status) (); 4405 4406 #ifdef OVERRIDE_ABI_FORMAT 4407 OVERRIDE_ABI_FORMAT (fndecl); 4408 #endif 4409 4410 invoke_set_current_function_hook (fndecl); 4411 4412 if (fndecl != NULL_TREE) 4413 { 4414 DECL_STRUCT_FUNCTION (fndecl) = cfun; 4415 cfun->decl = fndecl; 4416 current_function_funcdef_no = get_next_funcdef_no (); 4417 4418 result = DECL_RESULT (fndecl); 4419 if (!abstract_p && aggregate_value_p (result, fndecl)) 4420 { 4421 #ifdef PCC_STATIC_STRUCT_RETURN 4422 cfun->returns_pcc_struct = 1; 4423 #endif 4424 cfun->returns_struct = 1; 4425 } 4426 4427 cfun->stdarg = stdarg_p (fntype); 4428 4429 /* Assume all registers in stdarg functions need to be saved. */ 4430 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE; 4431 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE; 4432 4433 /* ??? This could be set on a per-function basis by the front-end 4434 but is this worth the hassle? */ 4435 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions; 4436 } 4437 } 4438 4439 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL 4440 instead of just setting it. */ 4441 4442 void 4443 push_struct_function (tree fndecl) 4444 { 4445 VEC_safe_push (function_p, heap, cfun_stack, cfun); 4446 allocate_struct_function (fndecl, false); 4447 } 4448 4449 /* Reset crtl and other non-struct-function variables to defaults as 4450 appropriate for emitting rtl at the start of a function. */ 4451 4452 static void 4453 prepare_function_start (void) 4454 { 4455 gcc_assert (!crtl->emit.x_last_insn); 4456 init_temp_slots (); 4457 init_emit (); 4458 init_varasm_status (); 4459 init_expr (); 4460 default_rtl_profile (); 4461 4462 if (flag_stack_usage_info) 4463 { 4464 cfun->su = ggc_alloc_cleared_stack_usage (); 4465 cfun->su->static_stack_size = -1; 4466 } 4467 4468 cse_not_expected = ! optimize; 4469 4470 /* Caller save not needed yet. */ 4471 caller_save_needed = 0; 4472 4473 /* We haven't done register allocation yet. */ 4474 reg_renumber = 0; 4475 4476 /* Indicate that we have not instantiated virtual registers yet. */ 4477 virtuals_instantiated = 0; 4478 4479 /* Indicate that we want CONCATs now. */ 4480 generating_concat_p = 1; 4481 4482 /* Indicate we have no need of a frame pointer yet. */ 4483 frame_pointer_needed = 0; 4484 } 4485 4486 /* Initialize the rtl expansion mechanism so that we can do simple things 4487 like generate sequences. This is used to provide a context during global 4488 initialization of some passes. You must call expand_dummy_function_end 4489 to exit this context. 
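*/

/* Added usage sketch, not part of the original source: the required
   bracketing when a pass needs a scratch RTL context during global
   initialization.  */
#if 0
  init_dummy_function_start ();
  /* ... emit throwaway RTL here, e.g. to probe which insn patterns
     the target can match ... */
  expand_dummy_function_end ();
#endif
/* The assert in init_dummy_function_start below enforces that these
   contexts do not nest.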
*/ 4490 4491 void 4492 init_dummy_function_start (void) 4493 { 4494 gcc_assert (!in_dummy_function); 4495 in_dummy_function = true; 4496 push_struct_function (NULL_TREE); 4497 prepare_function_start (); 4498 } 4499 4500 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node) 4501 and initialize static variables for generating RTL for the statements 4502 of the function. */ 4503 4504 void 4505 init_function_start (tree subr) 4506 { 4507 if (subr && DECL_STRUCT_FUNCTION (subr)) 4508 set_cfun (DECL_STRUCT_FUNCTION (subr)); 4509 else 4510 allocate_struct_function (subr, false); 4511 prepare_function_start (); 4512 decide_function_section (subr); 4513 4514 /* Warn if this value is an aggregate type, 4515 regardless of which calling convention we are using for it. */ 4516 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)))) 4517 warning (OPT_Waggregate_return, "function returns an aggregate"); 4518 } 4519 4520 /* Make sure all values used by the optimization passes have sane defaults. */ 4521 unsigned int 4522 init_function_for_compilation (void) 4523 { 4524 reg_renumber = 0; 4525 return 0; 4526 } 4527 4528 struct rtl_opt_pass pass_init_function = 4529 { 4530 { 4531 RTL_PASS, 4532 "*init_function", /* name */ 4533 NULL, /* gate */ 4534 init_function_for_compilation, /* execute */ 4535 NULL, /* sub */ 4536 NULL, /* next */ 4537 0, /* static_pass_number */ 4538 TV_NONE, /* tv_id */ 4539 0, /* properties_required */ 4540 0, /* properties_provided */ 4541 0, /* properties_destroyed */ 4542 0, /* todo_flags_start */ 4543 0 /* todo_flags_finish */ 4544 } 4545 }; 4546 4547 4548 void 4549 expand_main_function (void) 4550 { 4551 #if (defined(INVOKE__main) \ 4552 || (!defined(HAS_INIT_SECTION) \ 4553 && !defined(INIT_SECTION_ASM_OP) \ 4554 && !defined(INIT_ARRAY_SECTION_ASM_OP))) 4555 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0); 4556 #endif 4557 } 4558 4559 /* Expand code to initialize the stack_protect_guard. This is invoked at 4560 the beginning of a function to be protected. */ 4561 4562 #ifndef HAVE_stack_protect_set 4563 # define HAVE_stack_protect_set 0 4564 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX) 4565 #endif 4566 4567 void 4568 stack_protect_prologue (void) 4569 { 4570 tree guard_decl = targetm.stack_protect_guard (); 4571 rtx x, y; 4572 4573 x = expand_normal (crtl->stack_protect_guard); 4574 y = expand_normal (guard_decl); 4575 4576 /* Allow the target to copy from Y to X without leaking Y into a 4577 register. */ 4578 if (HAVE_stack_protect_set) 4579 { 4580 rtx insn = gen_stack_protect_set (x, y); 4581 if (insn) 4582 { 4583 emit_insn (insn); 4584 return; 4585 } 4586 } 4587 4588 /* Otherwise do a straight move. */ 4589 emit_move_insn (x, y); 4590 } 4591 4592 /* Expand code to verify the stack_protect_guard. This is invoked at 4593 the end of a function to be protected. */ 4594 4595 #ifndef HAVE_stack_protect_test 4596 # define HAVE_stack_protect_test 0 4597 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX) 4598 #endif 4599 4600 void 4601 stack_protect_epilogue (void) 4602 { 4603 tree guard_decl = targetm.stack_protect_guard (); 4604 rtx label = gen_label_rtx (); 4605 rtx x, y, tmp; 4606 4607 x = expand_normal (crtl->stack_protect_guard); 4608 y = expand_normal (guard_decl); 4609 4610 /* Allow the target to compare Y with X without leaking either into 4611 a register. 
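   In C terms the emitted check behaves like (added note; names
   illustrative):

     if (guard_slot != global_guard)
       __stack_chk_fail ();	   (typically the libc failure hook)

   where the global guard comes from targetm.stack_protect_guard and the
   failure call from targetm.stack_protect_fail.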
*/ 4612 switch (HAVE_stack_protect_test != 0)
4613 {
4614 case 1:
4615 tmp = gen_stack_protect_test (x, y, label);
4616 if (tmp)
4617 {
4618 emit_insn (tmp);
4619 break;
4620 }
4621 /* FALLTHRU */
4622
4623 default:
4624 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4625 break;
4626 }
4627
4628 /* The noreturn predictor has been moved to the tree level. The rtl-level
4629 predictors estimate this branch about 20%, which isn't enough to get
4630 things moved out of line. Since this is the only extant case of adding
4631 a noreturn function at the rtl level, it doesn't seem worth doing anything
4632 except adding the prediction by hand. */
4633 tmp = get_last_insn ();
4634 if (JUMP_P (tmp))
4635 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4636
4637 expand_expr_stmt (targetm.stack_protect_fail ());
4638 emit_label (label);
4639 }
4640
4641 /* Start the RTL for a new function, and set variables used for
4642 emitting RTL.
4643 SUBR is the FUNCTION_DECL node. */
4647 void
4648 expand_function_start (tree subr)
4649 {
4650 /* Make sure volatile mem refs aren't considered
4651 valid operands of arithmetic insns. */
4652 init_recog_no_volatile ();
4653
4654 crtl->profile
4655 = (profile_flag
4656 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4657
4658 crtl->limit_stack
4659 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4660
4661 /* Make the label for return statements to jump to. Do not special
4662 case machines with special return instructions -- they will be
4663 handled later during jump, ifcvt, or epilogue creation. */
4664 return_label = gen_label_rtx ();
4665
4666 /* Initialize rtx used to return the value. */
4667 /* Do this before assign_parms so that we copy the struct value address
4668 before any library calls that assign parms might generate. */
4669
4670 /* Decide whether to return the value in memory or in a register. */
4671 if (aggregate_value_p (DECL_RESULT (subr), subr))
4672 {
4673 /* Returning something that won't go in a register. */
4674 rtx value_address = 0;
4675
4676 #ifdef PCC_STATIC_STRUCT_RETURN
4677 if (cfun->returns_pcc_struct)
4678 {
4679 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4680 value_address = assemble_static_space (size);
4681 }
4682 else
4683 #endif
4684 {
4685 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4686 /* Expect to be passed the address of a place to store the value.
4687 If it is passed as an argument, assign_parms will take care of
4688 it. */
4689 if (sv)
4690 {
4691 value_address = gen_reg_rtx (Pmode);
4692 emit_move_insn (value_address, sv);
4693 }
4694 }
4695 if (value_address)
4696 {
4697 rtx x = value_address;
4698 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4699 {
4700 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4701 set_mem_attributes (x, DECL_RESULT (subr), 1);
4702 }
4703 SET_DECL_RTL (DECL_RESULT (subr), x);
4704 }
4705 }
4706 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4707 /* If return mode is void, this decl rtl should not be used. */
4708 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4709 else
4710 {
4711 /* Compute the return values into a pseudo reg, which we will copy
4712 into the true return register after the cleanups are done.
*/ 4713 tree return_type = TREE_TYPE (DECL_RESULT (subr)); 4714 if (TYPE_MODE (return_type) != BLKmode 4715 && targetm.calls.return_in_msb (return_type)) 4716 /* expand_function_end will insert the appropriate padding in 4717 this case. Use the return value's natural (unpadded) mode 4718 within the function proper. */ 4719 SET_DECL_RTL (DECL_RESULT (subr), 4720 gen_reg_rtx (TYPE_MODE (return_type))); 4721 else 4722 { 4723 /* In order to figure out what mode to use for the pseudo, we 4724 figure out what the mode of the eventual return register will 4725 actually be, and use that. */ 4726 rtx hard_reg = hard_function_value (return_type, subr, 0, 1); 4727 4728 /* Structures that are returned in registers are not 4729 aggregate_value_p, so we may see a PARALLEL or a REG. */ 4730 if (REG_P (hard_reg)) 4731 SET_DECL_RTL (DECL_RESULT (subr), 4732 gen_reg_rtx (GET_MODE (hard_reg))); 4733 else 4734 { 4735 gcc_assert (GET_CODE (hard_reg) == PARALLEL); 4736 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg)); 4737 } 4738 } 4739 4740 /* Set DECL_REGISTER flag so that expand_function_end will copy the 4741 result to the real return register(s). */ 4742 DECL_REGISTER (DECL_RESULT (subr)) = 1; 4743 } 4744 4745 /* Initialize rtx for parameters and local variables. 4746 In some cases this requires emitting insns. */ 4747 assign_parms (subr); 4748 4749 /* If function gets a static chain arg, store it. */ 4750 if (cfun->static_chain_decl) 4751 { 4752 tree parm = cfun->static_chain_decl; 4753 rtx local, chain, insn; 4754 4755 local = gen_reg_rtx (Pmode); 4756 chain = targetm.calls.static_chain (current_function_decl, true); 4757 4758 set_decl_incoming_rtl (parm, chain, false); 4759 SET_DECL_RTL (parm, local); 4760 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))); 4761 4762 insn = emit_move_insn (local, chain); 4763 4764 /* Mark the register as eliminable, similar to parameters. */ 4765 if (MEM_P (chain) 4766 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0))) 4767 set_dst_reg_note (insn, REG_EQUIV, chain, local); 4768 } 4769 4770 /* If the function receives a non-local goto, then store the 4771 bits we need to restore the frame pointer. */ 4772 if (cfun->nonlocal_goto_save_area) 4773 { 4774 tree t_save; 4775 rtx r_save; 4776 4777 /* ??? We need to do this save early. Unfortunately here is 4778 before the frame variable gets declared. Help out... */ 4779 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0); 4780 if (!DECL_RTL_SET_P (var)) 4781 expand_decl (var); 4782 4783 t_save = build4 (ARRAY_REF, 4784 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)), 4785 cfun->nonlocal_goto_save_area, 4786 integer_zero_node, NULL_TREE, NULL_TREE); 4787 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE); 4788 gcc_assert (GET_MODE (r_save) == Pmode); 4789 4790 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ()); 4791 update_nonlocal_goto_save_area (); 4792 } 4793 4794 /* The following was moved from init_function_start. 4795 The move is supposed to make sdb output more accurate. */ 4796 /* Indicate the beginning of the function body, 4797 as opposed to parm setup. */ 4798 emit_note (NOTE_INSN_FUNCTION_BEG); 4799 4800 gcc_assert (NOTE_P (get_last_insn ())); 4801 4802 parm_birth_insn = get_last_insn (); 4803 4804 if (crtl->profile) 4805 { 4806 #ifdef PROFILE_HOOK 4807 PROFILE_HOOK (current_function_funcdef_no); 4808 #endif 4809 } 4810 4811 /* If we are doing generic stack checking, the probe should go here. 
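   (Added note: the NOTE_INSN_DELETED emitted below is
     only a placeholder; expand_function_end inserts the real probe before
     it once it knows whether the function actually makes calls.)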
*/ 4812 if (flag_stack_check == GENERIC_STACK_CHECK) 4813 stack_check_probe_note = emit_note (NOTE_INSN_DELETED); 4814 4815 /* Make sure there is a line number after the function entry setup code. */ 4816 force_next_line_note (); 4817 } 4818 4819 /* Undo the effects of init_dummy_function_start. */ 4820 void 4821 expand_dummy_function_end (void) 4822 { 4823 gcc_assert (in_dummy_function); 4824 4825 /* End any sequences that failed to be closed due to syntax errors. */ 4826 while (in_sequence_p ()) 4827 end_sequence (); 4828 4829 /* Outside function body, can't compute type's actual size 4830 until next function's body starts. */ 4831 4832 free_after_parsing (cfun); 4833 free_after_compilation (cfun); 4834 pop_cfun (); 4835 in_dummy_function = false; 4836 } 4837 4838 /* Call DOIT for each hard register used as a return value from 4839 the current function. */ 4840 4841 void 4842 diddle_return_value (void (*doit) (rtx, void *), void *arg) 4843 { 4844 rtx outgoing = crtl->return_rtx; 4845 4846 if (! outgoing) 4847 return; 4848 4849 if (REG_P (outgoing)) 4850 (*doit) (outgoing, arg); 4851 else if (GET_CODE (outgoing) == PARALLEL) 4852 { 4853 int i; 4854 4855 for (i = 0; i < XVECLEN (outgoing, 0); i++) 4856 { 4857 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0); 4858 4859 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) 4860 (*doit) (x, arg); 4861 } 4862 } 4863 } 4864 4865 static void 4866 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) 4867 { 4868 emit_clobber (reg); 4869 } 4870 4871 void 4872 clobber_return_register (void) 4873 { 4874 diddle_return_value (do_clobber_return_reg, NULL); 4875 4876 /* In case we do use pseudo to return value, clobber it too. */ 4877 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) 4878 { 4879 tree decl_result = DECL_RESULT (current_function_decl); 4880 rtx decl_rtl = DECL_RTL (decl_result); 4881 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER) 4882 { 4883 do_clobber_return_reg (decl_rtl, NULL); 4884 } 4885 } 4886 } 4887 4888 static void 4889 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) 4890 { 4891 emit_use (reg); 4892 } 4893 4894 static void 4895 use_return_register (void) 4896 { 4897 diddle_return_value (do_use_return_reg, NULL); 4898 } 4899 4900 /* Possibly warn about unused parameters. */ 4901 void 4902 do_warn_unused_parameter (tree fn) 4903 { 4904 tree decl; 4905 4906 for (decl = DECL_ARGUMENTS (fn); 4907 decl; decl = DECL_CHAIN (decl)) 4908 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL 4909 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl) 4910 && !TREE_NO_WARNING (decl)) 4911 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl); 4912 } 4913 4914 static GTY(()) rtx initial_trampoline; 4915 4916 /* Generate RTL for the end of the current function. */ 4917 4918 void 4919 expand_function_end (void) 4920 { 4921 rtx clobber_after; 4922 4923 /* If arg_pointer_save_area was referenced only from a nested 4924 function, we will not have initialized it yet. Do that now. */ 4925 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init) 4926 get_arg_pointer_save_area (); 4927 4928 /* If we are doing generic stack checking and this function makes calls, 4929 do a stack probe at the start of the function to ensure we have enough 4930 space for another stack frame. 
*/ 4931 if (flag_stack_check == GENERIC_STACK_CHECK) 4932 { 4933 rtx insn, seq; 4934 4935 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) 4936 if (CALL_P (insn)) 4937 { 4938 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE); 4939 start_sequence (); 4940 if (STACK_CHECK_MOVING_SP) 4941 anti_adjust_stack_and_probe (max_frame_size, true); 4942 else 4943 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size); 4944 seq = get_insns (); 4945 end_sequence (); 4946 set_insn_locators (seq, prologue_locator); 4947 emit_insn_before (seq, stack_check_probe_note); 4948 break; 4949 } 4950 } 4951 4952 /* End any sequences that failed to be closed due to syntax errors. */ 4953 while (in_sequence_p ()) 4954 end_sequence (); 4955 4956 clear_pending_stack_adjust (); 4957 do_pending_stack_adjust (); 4958 4959 /* Output a linenumber for the end of the function. 4960 SDB depends on this. */ 4961 force_next_line_note (); 4962 set_curr_insn_source_location (input_location); 4963 4964 /* Before the return label (if any), clobber the return 4965 registers so that they are not propagated live to the rest of 4966 the function. This can only happen with functions that drop 4967 through; if there had been a return statement, there would 4968 have either been a return rtx, or a jump to the return label. 4969 4970 We delay actual code generation after the current_function_value_rtx 4971 is computed. */ 4972 clobber_after = get_last_insn (); 4973 4974 /* Output the label for the actual return from the function. */ 4975 emit_label (return_label); 4976 4977 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ) 4978 { 4979 /* Let except.c know where it should emit the call to unregister 4980 the function context for sjlj exceptions. */ 4981 if (flag_exceptions) 4982 sjlj_emit_function_exit_after (get_last_insn ()); 4983 } 4984 else 4985 { 4986 /* We want to ensure that instructions that may trap are not 4987 moved into the epilogue by scheduling, because we don't 4988 always emit unwind information for the epilogue. */ 4989 if (cfun->can_throw_non_call_exceptions) 4990 emit_insn (gen_blockage ()); 4991 } 4992 4993 /* If this is an implementation of throw, do what's necessary to 4994 communicate between __builtin_eh_return and the epilogue. */ 4995 expand_eh_return (); 4996 4997 /* If scalar return value was computed in a pseudo-reg, or was a named 4998 return value that got dumped to the stack, copy that to the hard 4999 return register. */ 5000 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) 5001 { 5002 tree decl_result = DECL_RESULT (current_function_decl); 5003 rtx decl_rtl = DECL_RTL (decl_result); 5004 5005 if (REG_P (decl_rtl) 5006 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER 5007 : DECL_REGISTER (decl_result)) 5008 { 5009 rtx real_decl_rtl = crtl->return_rtx; 5010 5011 /* This should be set in assign_parms. */ 5012 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl)); 5013 5014 /* If this is a BLKmode structure being returned in registers, 5015 then use the mode computed in expand_return. Note that if 5016 decl_rtl is memory, then its mode may have been changed, 5017 but that crtl->return_rtx has not. */ 5018 if (GET_MODE (real_decl_rtl) == BLKmode) 5019 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl)); 5020 5021 /* If a non-BLKmode return value should be padded at the least 5022 significant end of the register, shift it left by the appropriate 5023 amount. BLKmode results are handled using the group load/store 5024 machinery. 
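   As a worked instance (added): a 1-byte result
	 returned in a 4-byte MSB-padded register is shifted left by
	 (4 - 1) * BITS_PER_UNIT == 24 bits, so the caller finds it at
	 the most significant end.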
*/ 5025 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5026 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5027 {
5028 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5029 REGNO (real_decl_rtl)),
5030 decl_rtl);
5031 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5032 }
5033 /* If a named return value was dumped to memory, then we may need
5034 to re-do the PROMOTE_MODE signed/unsigned
5035 extension. */
5036 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5037 {
5038 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5039 promote_function_mode (TREE_TYPE (decl_result),
5040 GET_MODE (decl_rtl), &unsignedp,
5041 TREE_TYPE (current_function_decl), 1);
5042
5043 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5044 }
5045 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5046 {
5047 /* If expand_function_start has created a PARALLEL for decl_rtl,
5048 move the result to the real return registers. Otherwise, do
5049 a group load from decl_rtl for a named return. */
5050 if (GET_CODE (decl_rtl) == PARALLEL)
5051 emit_group_move (real_decl_rtl, decl_rtl);
5052 else
5053 emit_group_load (real_decl_rtl, decl_rtl,
5054 TREE_TYPE (decl_result),
5055 int_size_in_bytes (TREE_TYPE (decl_result)));
5056 }
5057 /* In the case of complex integer modes smaller than a word, we'll
5058 need to generate some non-trivial bitfield insertions. Do that
5059 on a pseudo and not the hard register. */
5060 else if (GET_CODE (decl_rtl) == CONCAT
5061 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5062 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5063 {
5064 int old_generating_concat_p;
5065 rtx tmp;
5066
5067 old_generating_concat_p = generating_concat_p;
5068 generating_concat_p = 0;
5069 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5070 generating_concat_p = old_generating_concat_p;
5071
5072 emit_move_insn (tmp, decl_rtl);
5073 emit_move_insn (real_decl_rtl, tmp);
5074 }
5075 else
5076 emit_move_insn (real_decl_rtl, decl_rtl);
5077 }
5078 }
5079
5080 /* If returning a structure, arrange to return the address of the value
5081 in a place where debuggers expect to find it.
5082
5083 If returning a structure PCC style,
5084 the caller also depends on this value.
5085 And cfun->returns_pcc_struct is not necessarily set. */
5086 if (cfun->returns_struct
5087 || cfun->returns_pcc_struct)
5088 {
5089 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5090 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5091 rtx outgoing;
5092
5093 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5094 type = TREE_TYPE (type);
5095 else
5096 value_address = XEXP (value_address, 0);
5097
5098 outgoing = targetm.calls.function_value (build_pointer_type (type),
5099 current_function_decl, true);
5100
5101 /* Mark this as a function return value so integrate will delete the
5102 assignment and USE below when inlining this function. */
5103 REG_FUNCTION_VALUE_P (outgoing) = 1;
5104
5105 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5106 value_address = convert_memory_address (GET_MODE (outgoing),
5107 value_address);
5108
5109 emit_move_insn (outgoing, value_address);
5110
5111 /* Show the return register used to hold the result (in this case,
5112 the address of the result). */
5113 crtl->return_rtx = outgoing;
5114 }
5115
5116 /* Emit the actual code to clobber the return register.
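   (Added note: the clobbers are built in a detached
     sequence and then inserted after CLOBBER_AFTER, i.e. just before the
     return label emitted above, so that fall-through paths cannot carry a
     stale return register past this point.)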
*/ 5117 {
5118 rtx seq;
5119
5120 start_sequence ();
5121 clobber_return_register ();
5122 seq = get_insns ();
5123 end_sequence ();
5124
5125 emit_insn_after (seq, clobber_after);
5126 }
5127
5128 /* Output the label for the naked return from the function. */
5129 if (naked_return_label)
5130 emit_label (naked_return_label);
5131
5132 /* @@@ This is a kludge. We want to ensure that instructions that
5133 may trap are not moved into the epilogue by scheduling, because
5134 we don't always emit unwind information for the epilogue. */
5135 if (cfun->can_throw_non_call_exceptions
5136 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5137 emit_insn (gen_blockage ());
5138
5139 /* If stack protection is enabled for this function, check the guard. */
5140 if (crtl->stack_protect_guard)
5141 stack_protect_epilogue ();
5142
5143 /* If we had calls to alloca, and this machine needs
5144 an accurate stack pointer to exit the function,
5145 insert some code to save and restore the stack pointer. */
5146 if (! EXIT_IGNORE_STACK
5147 && cfun->calls_alloca)
5148 {
5149 rtx tem = 0, seq;
5150
5151 start_sequence ();
5152 emit_stack_save (SAVE_FUNCTION, &tem);
5153 seq = get_insns ();
5154 end_sequence ();
5155 emit_insn_before (seq, parm_birth_insn);
5156
5157 emit_stack_restore (SAVE_FUNCTION, tem);
5158 }
5159
5160 /* ??? This should no longer be necessary since stupid is no longer with
5161 us, but there are some parts of the compiler (e.g. reload_combine, and
5162 sh mach_dep_reorg) that still try to compute their own lifetime info
5163 instead of using the general framework. */
5164 use_return_register ();
5165 }
5166
5167 rtx
5168 get_arg_pointer_save_area (void)
5169 {
5170 rtx ret = arg_pointer_save_area;
5171
5172 if (! ret)
5173 {
5174 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5175 arg_pointer_save_area = ret;
5176 }
5177
5178 if (! crtl->arg_pointer_save_area_init)
5179 {
5180 rtx seq;
5181
5182 /* Save the arg pointer at the beginning of the function. The
5183 generated stack slot may not be a valid memory address, so we
5184 have to check it and fix it if necessary. */
5185 start_sequence ();
5186 emit_move_insn (validize_mem (ret),
5187 crtl->args.internal_arg_pointer);
5188 seq = get_insns ();
5189 end_sequence ();
5190
5191 push_topmost_sequence ();
5192 emit_insn_after (seq, entry_of_function ());
5193 pop_topmost_sequence ();
5194
5195 crtl->arg_pointer_save_area_init = true;
5196 }
5197
5198 return ret;
5199 }
5200
5201 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5202 for the first time. */
5203
5204 static void
5205 record_insns (rtx insns, rtx end, htab_t *hashp)
5206 {
5207 rtx tmp;
5208 htab_t hash = *hashp;
5209
5210 if (hash == NULL)
5211 *hashp = hash
5212 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5213
5214 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5215 {
5216 void **slot = htab_find_slot (hash, tmp, INSERT);
5217 gcc_assert (*slot == NULL);
5218 *slot = tmp;
5219 }
5220 }
5221
5222 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5223 basic block, by splitting, or by a peephole. If INSN is a prologue or
5224 epilogue insn, then record COPY as well.
*/ 5225 5226 void 5227 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy) 5228 { 5229 htab_t hash; 5230 void **slot; 5231 5232 hash = epilogue_insn_hash; 5233 if (!hash || !htab_find (hash, insn)) 5234 { 5235 hash = prologue_insn_hash; 5236 if (!hash || !htab_find (hash, insn)) 5237 return; 5238 } 5239 5240 slot = htab_find_slot (hash, copy, INSERT); 5241 gcc_assert (*slot == NULL); 5242 *slot = copy; 5243 } 5244 5245 /* Set the locator of the insn chain starting at INSN to LOC. */ 5246 static void 5247 set_insn_locators (rtx insn, int loc) 5248 { 5249 while (insn != NULL_RTX) 5250 { 5251 if (INSN_P (insn)) 5252 INSN_LOCATOR (insn) = loc; 5253 insn = NEXT_INSN (insn); 5254 } 5255 } 5256 5257 /* Determine if any INSNs in HASH are, or are part of, INSN. Because 5258 we can be running after reorg, SEQUENCE rtl is possible. */ 5259 5260 static bool 5261 contains (const_rtx insn, htab_t hash) 5262 { 5263 if (hash == NULL) 5264 return false; 5265 5266 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) 5267 { 5268 int i; 5269 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--) 5270 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i))) 5271 return true; 5272 return false; 5273 } 5274 5275 return htab_find (hash, insn) != NULL; 5276 } 5277 5278 int 5279 prologue_epilogue_contains (const_rtx insn) 5280 { 5281 if (contains (insn, prologue_insn_hash)) 5282 return 1; 5283 if (contains (insn, epilogue_insn_hash)) 5284 return 1; 5285 return 0; 5286 } 5287 5288 #ifdef HAVE_simple_return 5289 5290 /* Return true if INSN requires the stack frame to be set up. 5291 PROLOGUE_USED contains the hard registers used in the function 5292 prologue. SET_UP_BY_PROLOGUE is the set of registers we expect the 5293 prologue to set up for the function. */ 5294 bool 5295 requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used, 5296 HARD_REG_SET set_up_by_prologue) 5297 { 5298 df_ref *df_rec; 5299 HARD_REG_SET hardregs; 5300 unsigned regno; 5301 5302 if (CALL_P (insn)) 5303 return !SIBLING_CALL_P (insn); 5304 5305 /* We need a frame to get the unique CFA expected by the unwinder. */ 5306 if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn)) 5307 return true; 5308 5309 CLEAR_HARD_REG_SET (hardregs); 5310 for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++) 5311 { 5312 rtx dreg = DF_REF_REG (*df_rec); 5313 5314 if (!REG_P (dreg)) 5315 continue; 5316 5317 add_to_hard_reg_set (&hardregs, GET_MODE (dreg), 5318 REGNO (dreg)); 5319 } 5320 if (hard_reg_set_intersect_p (hardregs, prologue_used)) 5321 return true; 5322 AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set); 5323 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) 5324 if (TEST_HARD_REG_BIT (hardregs, regno) 5325 && df_regs_ever_live_p (regno)) 5326 return true; 5327 5328 for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++) 5329 { 5330 rtx reg = DF_REF_REG (*df_rec); 5331 5332 if (!REG_P (reg)) 5333 continue; 5334 5335 add_to_hard_reg_set (&hardregs, GET_MODE (reg), 5336 REGNO (reg)); 5337 } 5338 if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue)) 5339 return true; 5340 5341 return false; 5342 } 5343 5344 /* See whether BB has a single successor that uses [REGNO, END_REGNO), 5345 and if BB is its only predecessor. Return that block if so, 5346 otherwise return null. 
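   (Added note: [REGNO, END_REGNO) covers each word of a multi-word hard
   register, e.g. a DImode value held in 32-bit registers 4 and 5 gives
   REGNO == 4, END_REGNO == 6.)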
*/ 5347 5348 static basic_block 5349 next_block_for_reg (basic_block bb, int regno, int end_regno) 5350 { 5351 edge e, live_edge; 5352 edge_iterator ei; 5353 bitmap live; 5354 int i; 5355 5356 live_edge = NULL; 5357 FOR_EACH_EDGE (e, ei, bb->succs) 5358 { 5359 live = df_get_live_in (e->dest); 5360 for (i = regno; i < end_regno; i++) 5361 if (REGNO_REG_SET_P (live, i)) 5362 { 5363 if (live_edge && live_edge != e) 5364 return NULL; 5365 live_edge = e; 5366 } 5367 } 5368 5369 /* We can sometimes encounter dead code. Don't try to move it 5370 into the exit block. */ 5371 if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR) 5372 return NULL; 5373 5374 /* Reject targets of abnormal edges. This is needed for correctness 5375 on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on 5376 exception edges even though it is generally treated as call-saved 5377 for the majority of the compilation. Moving across abnormal edges 5378 isn't going to be interesting for shrink-wrap usage anyway. */ 5379 if (live_edge->flags & EDGE_ABNORMAL) 5380 return NULL; 5381 5382 if (EDGE_COUNT (live_edge->dest->preds) > 1) 5383 return NULL; 5384 5385 return live_edge->dest; 5386 } 5387 5388 /* Try to move INSN from BB to a successor. Return true on success. 5389 USES and DEFS are the set of registers that are used and defined 5390 after INSN in BB. */ 5391 5392 static bool 5393 move_insn_for_shrink_wrap (basic_block bb, rtx insn, 5394 const HARD_REG_SET uses, 5395 const HARD_REG_SET defs) 5396 { 5397 rtx set, src, dest; 5398 bitmap live_out, live_in, bb_uses, bb_defs; 5399 unsigned int i, dregno, end_dregno, sregno, end_sregno; 5400 basic_block next_block; 5401 5402 /* Look for a simple register copy. */ 5403 set = single_set (insn); 5404 if (!set) 5405 return false; 5406 src = SET_SRC (set); 5407 dest = SET_DEST (set); 5408 if (!REG_P (dest) || !REG_P (src)) 5409 return false; 5410 5411 /* Make sure that the source register isn't defined later in BB. */ 5412 sregno = REGNO (src); 5413 end_sregno = END_REGNO (src); 5414 if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno)) 5415 return false; 5416 5417 /* Make sure that the destination register isn't referenced later in BB. */ 5418 dregno = REGNO (dest); 5419 end_dregno = END_REGNO (dest); 5420 if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno) 5421 || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno)) 5422 return false; 5423 5424 /* See whether there is a successor block to which we could move INSN. */ 5425 next_block = next_block_for_reg (bb, dregno, end_dregno); 5426 if (!next_block) 5427 return false; 5428 5429 /* At this point we are committed to moving INSN, but let's try to 5430 move it as far as we can. */ 5431 do 5432 { 5433 live_out = df_get_live_out (bb); 5434 live_in = df_get_live_in (next_block); 5435 bb = next_block; 5436 5437 /* Check whether BB uses DEST or clobbers DEST. We need to add 5438 INSN to BB if so. Either way, DEST is no longer live on entry, 5439 except for any part that overlaps SRC (next loop). */ 5440 bb_uses = &DF_LR_BB_INFO (bb)->use; 5441 bb_defs = &DF_LR_BB_INFO (bb)->def; 5442 for (i = dregno; i < end_dregno; i++) 5443 { 5444 if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i)) 5445 next_block = NULL; 5446 CLEAR_REGNO_REG_SET (live_out, i); 5447 CLEAR_REGNO_REG_SET (live_in, i); 5448 } 5449 5450 /* Check whether BB clobbers SRC. We need to add INSN to BB if so. 5451 Either way, SRC is now live on entry. 
*/ 5452 for (i = sregno; i < end_sregno; i++)
5453 {
5454 if (REGNO_REG_SET_P (bb_defs, i))
5455 next_block = NULL;
5456 SET_REGNO_REG_SET (live_out, i);
5457 SET_REGNO_REG_SET (live_in, i);
5458 }
5459
5460 /* If we don't need to add the move to BB, look for a single
5461 successor block. */
5462 if (next_block)
5463 next_block = next_block_for_reg (next_block, dregno, end_dregno);
5464 }
5465 while (next_block);
5466
5467 /* BB now defines DEST. It only uses the parts of DEST that overlap SRC
5468 (next loop). */
5469 for (i = dregno; i < end_dregno; i++)
5470 {
5471 CLEAR_REGNO_REG_SET (bb_uses, i);
5472 SET_REGNO_REG_SET (bb_defs, i);
5473 }
5474
5475 /* BB now uses SRC. */
5476 for (i = sregno; i < end_sregno; i++)
5477 SET_REGNO_REG_SET (bb_uses, i);
5478
5479 emit_insn_after (PATTERN (insn), bb_note (bb));
5480 delete_insn (insn);
5481 return true;
5482 }
5483
5484 /* Look for register copies in the first block of the function, and move
5485 them down into successor blocks if the register is used only on one
5486 path. This exposes more opportunities for shrink-wrapping. These
5487 kinds of sets often occur when incoming argument registers are moved
5488 to call-saved registers because their values are live across one or
5489 more calls during the function. */
5490
5491 static void
5492 prepare_shrink_wrap (basic_block entry_block)
5493 {
5494 rtx insn, curr, x;
5495 HARD_REG_SET uses, defs;
5496 df_ref *ref;
5497
5498 CLEAR_HARD_REG_SET (uses);
5499 CLEAR_HARD_REG_SET (defs);
5500 FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
5501 if (NONDEBUG_INSN_P (insn)
5502 && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
5503 {
5504 /* Add all defined registers to DEFS. */
5505 for (ref = DF_INSN_DEFS (insn); *ref; ref++)
5506 {
5507 x = DF_REF_REG (*ref);
5508 if (REG_P (x) && HARD_REGISTER_P (x))
5509 SET_HARD_REG_BIT (defs, REGNO (x));
5510 }
5511
5512 /* Add all used registers to USES. */
5513 for (ref = DF_INSN_USES (insn); *ref; ref++)
5514 {
5515 x = DF_REF_REG (*ref);
5516 if (REG_P (x) && HARD_REGISTER_P (x))
5517 SET_HARD_REG_BIT (uses, REGNO (x));
5518 }
5519 }
5520 }
5521
5522 #endif
5523
5524 #ifdef HAVE_return
5525 /* Insert use of return register before the end of BB. */
5526
5527 static void
5528 emit_use_return_register_into_block (basic_block bb)
5529 {
5530 rtx seq;
5531 start_sequence ();
5532 use_return_register ();
5533 seq = get_insns ();
5534 end_sequence ();
5535 emit_insn_before (seq, BB_END (bb));
5536 }
5537
5538
5539 /* Create a return pattern, either simple_return or return, depending on
5540 SIMPLE_P. */
5541
5542 static rtx
5543 gen_return_pattern (bool simple_p)
5544 {
5545 #ifdef HAVE_simple_return
5546 return simple_p ? gen_simple_return () : gen_return ();
5547 #else
5548 gcc_assert (!simple_p);
5549 return gen_return ();
5550 #endif
5551 }
5552
5553 /* Insert an appropriate return pattern at the end of block BB. This
5554 also means updating block_for_insn appropriately. SIMPLE_P is
5555 the same as in gen_return_pattern and passed to it. */
5556
5557 static void
5558 emit_return_into_block (bool simple_p, basic_block bb)
5559 {
5560 rtx jump, pat;
5561 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5562 pat = PATTERN (jump);
5563 if (GET_CODE (pat) == PARALLEL)
5564 pat = XVECEXP (pat, 0, 0);
5565 gcc_assert (ANY_RETURN_P (pat));
5566 JUMP_LABEL (jump) = pat;
5567 }
5568 #endif
5569
5570 /* Set JUMP_LABEL for a return insn.
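   The pattern here is either a bare (return) or (simple_return), or a
   PARALLEL wrapping one of them, e.g. on a hypothetical port that
   must also use its link register:

	(parallel [(return)
		   (use (reg:SI 14))])

   which is why we look through one level of PARALLEL before testing
   with ANY_RETURN_P.  (The PARALLEL shown is schematic; ports differ
   in what they place alongside the return.)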
*/ 5571
5572 void
5573 set_return_jump_label (rtx returnjump)
5574 {
5575 rtx pat = PATTERN (returnjump);
5576 if (GET_CODE (pat) == PARALLEL)
5577 pat = XVECEXP (pat, 0, 0);
5578 if (ANY_RETURN_P (pat))
5579 JUMP_LABEL (returnjump) = pat;
5580 else
5581 JUMP_LABEL (returnjump) = ret_rtx;
5582 }
5583
5584 #ifdef HAVE_simple_return
5585 /* Create a copy of BB instructions and insert at BEFORE. Redirect
5586 preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE. */
5587 static void
5588 dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
5589 bitmap_head *need_prologue)
5590 {
5591 edge_iterator ei;
5592 edge e;
5593 rtx insn = BB_END (bb);
5594
5595 /* We know BB has a single successor, so there is no need to copy a
5596 simple jump at the end of BB. */
5597 if (simplejump_p (insn))
5598 insn = PREV_INSN (insn);
5599
5600 start_sequence ();
5601 duplicate_insn_chain (BB_HEAD (bb), insn);
5602 if (dump_file)
5603 {
5604 unsigned count = 0;
5605 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5606 if (active_insn_p (insn))
5607 ++count;
5608 fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
5609 bb->index, copy_bb->index, count);
5610 }
5611 insn = get_insns ();
5612 end_sequence ();
5613 emit_insn_before (insn, before);
5614
5615 /* Redirect all the paths that need no prologue into copy_bb. */
5616 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
5617 if (!bitmap_bit_p (need_prologue, e->src->index))
5618 {
5619 redirect_edge_and_branch_force (e, copy_bb);
5620 continue;
5621 }
5622 else
5623 ei_next (&ei);
5624 }
5625 #endif
5626
5627 #if defined (HAVE_return) || defined (HAVE_simple_return)
5628 /* Return true if there are any active insns between HEAD and TAIL. */
5629 static bool
5630 active_insn_between (rtx head, rtx tail)
5631 {
5632 while (tail)
5633 {
5634 if (active_insn_p (tail))
5635 return true;
5636 if (tail == head)
5637 return false;
5638 tail = PREV_INSN (tail);
5639 }
5640 return false;
5641 }
5642
5643 /* LAST_BB is a block that exits, and is empty of active instructions.
5644 Examine its predecessors for jumps that can be converted to
5645 (conditional) returns. */
5646 static VEC (edge, heap) *
5647 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5648 VEC (edge, heap) *unconverted ATTRIBUTE_UNUSED)
5649 {
5650 int i;
5651 basic_block bb;
5652 rtx label;
5653 edge_iterator ei;
5654 edge e;
5655 VEC(basic_block,heap) *src_bbs;
5656
5657 src_bbs = VEC_alloc (basic_block, heap, EDGE_COUNT (last_bb->preds));
5658 FOR_EACH_EDGE (e, ei, last_bb->preds)
5659 if (e->src != ENTRY_BLOCK_PTR)
5660 VEC_quick_push (basic_block, src_bbs, e->src);
5661
5662 label = BB_HEAD (last_bb);
5663
5664 FOR_EACH_VEC_ELT (basic_block, src_bbs, i, bb)
5665 {
5666 rtx jump = BB_END (bb);
5667
5668 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5669 continue;
5670
5671 e = find_edge (bb, last_bb);
5672
5673 /* If we have an unconditional jump, we can replace that
5674 with a simple return instruction. */
5675 if (simplejump_p (jump))
5676 {
5677 /* The use of the return register might be present in the exit
5678 fallthru block. Either:
5679 - removing the use is safe, and we should remove the use in
5680 the exit fallthru block, or
5681 - removing the use is not safe, and we should add it here.
5682 For now, we conservatively choose the latter. Either of the
5683 two helps in crossjumping.
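   Schematically, the simplejump conversion below turns

	(jump_insn (set (pc) (label_ref L)))	; L = BB_HEAD (last_bb)

   into

	(jump_insn (return))			; or (simple_return)

   after which the edge into LAST_BB is redirected straight to the
   exit block.  The RTL shown is illustrative rather than taken from
   any particular port.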
*/ 5684 emit_use_return_register_into_block (bb); 5685 5686 emit_return_into_block (simple_p, bb); 5687 delete_insn (jump); 5688 } 5689 5690 /* If we have a conditional jump branching to the last 5691 block, we can try to replace that with a conditional 5692 return instruction. */ 5693 else if (condjump_p (jump)) 5694 { 5695 rtx dest; 5696 5697 if (simple_p) 5698 dest = simple_return_rtx; 5699 else 5700 dest = ret_rtx; 5701 if (!redirect_jump (jump, dest, 0)) 5702 { 5703 #ifdef HAVE_simple_return 5704 if (simple_p) 5705 { 5706 if (dump_file) 5707 fprintf (dump_file, 5708 "Failed to redirect bb %d branch.\n", bb->index); 5709 VEC_safe_push (edge, heap, unconverted, e); 5710 } 5711 #endif 5712 continue; 5713 } 5714 5715 /* See comment in simplejump_p case above. */ 5716 emit_use_return_register_into_block (bb); 5717 5718 /* If this block has only one successor, it both jumps 5719 and falls through to the fallthru block, so we can't 5720 delete the edge. */ 5721 if (single_succ_p (bb)) 5722 continue; 5723 } 5724 else 5725 { 5726 #ifdef HAVE_simple_return 5727 if (simple_p) 5728 { 5729 if (dump_file) 5730 fprintf (dump_file, 5731 "Failed to redirect bb %d branch.\n", bb->index); 5732 VEC_safe_push (edge, heap, unconverted, e); 5733 } 5734 #endif 5735 continue; 5736 } 5737 5738 /* Fix up the CFG for the successful change we just made. */ 5739 redirect_edge_succ (e, EXIT_BLOCK_PTR); 5740 e->flags &= ~EDGE_CROSSING; 5741 } 5742 VEC_free (basic_block, heap, src_bbs); 5743 return unconverted; 5744 } 5745 5746 /* Emit a return insn for the exit fallthru block. */ 5747 static basic_block 5748 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p) 5749 { 5750 basic_block last_bb = exit_fallthru_edge->src; 5751 5752 if (JUMP_P (BB_END (last_bb))) 5753 { 5754 last_bb = split_edge (exit_fallthru_edge); 5755 exit_fallthru_edge = single_succ_edge (last_bb); 5756 } 5757 emit_barrier_after (BB_END (last_bb)); 5758 emit_return_into_block (simple_p, last_bb); 5759 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU; 5760 return last_bb; 5761 } 5762 #endif 5763 5764 5765 /* Generate the prologue and epilogue RTL if the machine supports it. Thread 5766 this into place with notes indicating where the prologue ends and where 5767 the epilogue begins. Update the basic block information when possible. 5768 5769 Notes on epilogue placement: 5770 There are several kinds of edges to the exit block: 5771 * a single fallthru edge from LAST_BB 5772 * possibly, edges from blocks containing sibcalls 5773 * possibly, fake edges from infinite loops 5774 5775 The epilogue is always emitted on the fallthru edge from the last basic 5776 block in the function, LAST_BB, into the exit block. 5777 5778 If LAST_BB is empty except for a label, it is the target of every 5779 other basic block in the function that ends in a return. If a 5780 target has a return or simple_return pattern (possibly with 5781 conditional variants), these basic blocks can be changed so that a 5782 return insn is emitted into them, and their target is adjusted to 5783 the real exit block. 5784 5785 Notes on shrink wrapping: We implement a fairly conservative 5786 version of shrink-wrapping rather than the textbook one. We only 5787 generate a single prologue and a single epilogue. This is 5788 sufficient to catch a number of interesting cases involving early 5789 exits. 5790 5791 First, we identify the blocks that require the prologue to occur before 5792 them. 
These are the ones that modify a call-saved register, or reference 5793 any of the stack or frame pointer registers. To simplify things, we then 5794 mark everything reachable from these blocks as also requiring a prologue. 5795 This takes care of loops automatically, and avoids the need to examine 5796 whether MEMs reference the frame, since it is sufficient to check for 5797 occurrences of the stack or frame pointer. 5798 5799 We then compute the set of blocks for which the need for a prologue 5800 is anticipatable (borrowing terminology from the shrink-wrapping 5801 description in Muchnick's book). These are the blocks which either 5802 require a prologue themselves, or those that have only successors 5803 where the prologue is anticipatable. The prologue needs to be 5804 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1 5805 is not. For the moment, we ensure that only one such edge exists. 5806 5807 The epilogue is placed as described above, but we make a 5808 distinction between inserting return and simple_return patterns 5809 when modifying other blocks that end in a return. Blocks that end 5810 in a sibcall omit the sibcall_epilogue if the block is not in 5811 ANTIC. */ 5812 5813 static void 5814 thread_prologue_and_epilogue_insns (void) 5815 { 5816 bool inserted; 5817 #ifdef HAVE_simple_return 5818 VEC (edge, heap) *unconverted_simple_returns = NULL; 5819 bool nonempty_prologue; 5820 bitmap_head bb_flags; 5821 unsigned max_grow_size; 5822 #endif 5823 rtx returnjump; 5824 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED; 5825 rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED; 5826 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge; 5827 edge_iterator ei; 5828 5829 df_analyze (); 5830 5831 rtl_profile_for_bb (ENTRY_BLOCK_PTR); 5832 5833 inserted = false; 5834 seq = NULL_RTX; 5835 epilogue_end = NULL_RTX; 5836 returnjump = NULL_RTX; 5837 5838 /* Can't deal with multiple successors of the entry block at the 5839 moment. Function should always have at least one entry 5840 point. */ 5841 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR)); 5842 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR); 5843 orig_entry_edge = entry_edge; 5844 5845 split_prologue_seq = NULL_RTX; 5846 if (flag_split_stack 5847 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)) 5848 == NULL)) 5849 { 5850 #ifndef HAVE_split_stack_prologue 5851 gcc_unreachable (); 5852 #else 5853 gcc_assert (HAVE_split_stack_prologue); 5854 5855 start_sequence (); 5856 emit_insn (gen_split_stack_prologue ()); 5857 split_prologue_seq = get_insns (); 5858 end_sequence (); 5859 5860 record_insns (split_prologue_seq, NULL, &prologue_insn_hash); 5861 set_insn_locators (split_prologue_seq, prologue_locator); 5862 #endif 5863 } 5864 5865 prologue_seq = NULL_RTX; 5866 #ifdef HAVE_prologue 5867 if (HAVE_prologue) 5868 { 5869 start_sequence (); 5870 seq = gen_prologue (); 5871 emit_insn (seq); 5872 5873 /* Insert an explicit USE for the frame pointer 5874 if the profiling is on and the frame pointer is required. */ 5875 if (crtl->profile && frame_pointer_needed) 5876 emit_use (hard_frame_pointer_rtx); 5877 5878 /* Retain a map of the prologue insns. */ 5879 record_insns (seq, NULL, &prologue_insn_hash); 5880 emit_note (NOTE_INSN_PROLOGUE_END); 5881 5882 /* Ensure that instructions are not moved into the prologue when 5883 profiling is on. The call to the profiling routine can be 5884 emitted within the live range of a call-clobbered register. 
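   The blockage insn emitted below is only a scheduler fence; on
   typical ports the "blockage" pattern expands to an unspec_volatile
   that produces no machine code, so the barrier costs nothing at run
   time.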
*/ 5885 if (!targetm.profile_before_prologue () && crtl->profile) 5886 emit_insn (gen_blockage ()); 5887 5888 prologue_seq = get_insns (); 5889 end_sequence (); 5890 set_insn_locators (prologue_seq, prologue_locator); 5891 } 5892 #endif 5893 5894 #ifdef HAVE_simple_return 5895 bitmap_initialize (&bb_flags, &bitmap_default_obstack); 5896 5897 /* Try to perform a kind of shrink-wrapping, making sure the 5898 prologue/epilogue is emitted only around those parts of the 5899 function that require it. */ 5900 5901 nonempty_prologue = false; 5902 for (seq = prologue_seq; seq; seq = NEXT_INSN (seq)) 5903 if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END) 5904 { 5905 nonempty_prologue = true; 5906 break; 5907 } 5908 5909 if (flag_shrink_wrap && HAVE_simple_return 5910 && (targetm.profile_before_prologue () || !crtl->profile) 5911 && nonempty_prologue && !crtl->calls_eh_return) 5912 { 5913 HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge; 5914 struct hard_reg_set_container set_up_by_prologue; 5915 rtx p_insn; 5916 VEC(basic_block, heap) *vec; 5917 basic_block bb; 5918 bitmap_head bb_antic_flags; 5919 bitmap_head bb_on_list; 5920 bitmap_head bb_tail; 5921 5922 if (dump_file) 5923 fprintf (dump_file, "Attempting shrink-wrapping optimization.\n"); 5924 5925 /* Compute the registers set and used in the prologue. */ 5926 CLEAR_HARD_REG_SET (prologue_clobbered); 5927 CLEAR_HARD_REG_SET (prologue_used); 5928 for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn)) 5929 { 5930 HARD_REG_SET this_used; 5931 if (!NONDEBUG_INSN_P (p_insn)) 5932 continue; 5933 5934 CLEAR_HARD_REG_SET (this_used); 5935 note_uses (&PATTERN (p_insn), record_hard_reg_uses, 5936 &this_used); 5937 AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered); 5938 IOR_HARD_REG_SET (prologue_used, this_used); 5939 note_stores (PATTERN (p_insn), record_hard_reg_sets, 5940 &prologue_clobbered); 5941 } 5942 5943 prepare_shrink_wrap (entry_edge->dest); 5944 5945 bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack); 5946 bitmap_initialize (&bb_on_list, &bitmap_default_obstack); 5947 bitmap_initialize (&bb_tail, &bitmap_default_obstack); 5948 5949 /* Find the set of basic blocks that require a stack frame, 5950 and blocks that are too big to be duplicated. */ 5951 5952 vec = VEC_alloc (basic_block, heap, n_basic_blocks); 5953 5954 CLEAR_HARD_REG_SET (set_up_by_prologue.set); 5955 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, 5956 STACK_POINTER_REGNUM); 5957 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM); 5958 if (frame_pointer_needed) 5959 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, 5960 HARD_FRAME_POINTER_REGNUM); 5961 if (pic_offset_table_rtx) 5962 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, 5963 PIC_OFFSET_TABLE_REGNUM); 5964 if (stack_realign_drap && crtl->drap_reg) 5965 add_to_hard_reg_set (&set_up_by_prologue.set, 5966 GET_MODE (crtl->drap_reg), 5967 REGNO (crtl->drap_reg)); 5968 if (targetm.set_up_by_prologue) 5969 targetm.set_up_by_prologue (&set_up_by_prologue); 5970 5971 /* We don't use a different max size depending on 5972 optimize_bb_for_speed_p because increasing shrink-wrapping 5973 opportunities by duplicating tail blocks can actually result 5974 in an overall decrease in code size. 
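   As a worked example (illustrative numbers): on a port whose
   unconditional jump has a minimum length of 4 bytes, and with the
   parameter at its usual default of 8, max_grow_size below is
   4 * 8 = 32, so a tail block stays a duplication candidate as long
   as its insns total at most 32 bytes of minimum length.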
*/ 5975 max_grow_size = get_uncond_jump_length ();
5976 max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);
5977
5978 FOR_EACH_BB (bb)
5979 {
5980 rtx insn;
5981 unsigned size = 0;
5982
5983 FOR_BB_INSNS (bb, insn)
5984 if (NONDEBUG_INSN_P (insn))
5985 {
5986 if (requires_stack_frame_p (insn, prologue_used,
5987 set_up_by_prologue.set))
5988 {
5989 if (bb == entry_edge->dest)
5990 goto fail_shrinkwrap;
5991 bitmap_set_bit (&bb_flags, bb->index);
5992 VEC_quick_push (basic_block, vec, bb);
5993 break;
5994 }
5995 else if (size <= max_grow_size)
5996 {
5997 size += get_attr_min_length (insn);
5998 if (size > max_grow_size)
5999 bitmap_set_bit (&bb_on_list, bb->index);
6000 }
6001 }
6002 }
6003
6004 /* Blocks that really need a prologue, or are too big for tails. */
6005 bitmap_ior_into (&bb_on_list, &bb_flags);
6006
6007 /* For every basic block that needs a prologue, mark all blocks
6008 reachable from it, so as to ensure they are also seen as
6009 requiring a prologue. */
6010 while (!VEC_empty (basic_block, vec))
6011 {
6012 basic_block tmp_bb = VEC_pop (basic_block, vec);
6013
6014 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
6015 if (e->dest != EXIT_BLOCK_PTR
6016 && bitmap_set_bit (&bb_flags, e->dest->index))
6017 VEC_quick_push (basic_block, vec, e->dest);
6018 }
6019
6020 /* Find the set of basic blocks that need no prologue, have a
6021 single successor, can be duplicated, meet a max size
6022 requirement, and go to the exit via like blocks. */
6023 VEC_quick_push (basic_block, vec, EXIT_BLOCK_PTR);
6024 while (!VEC_empty (basic_block, vec))
6025 {
6026 basic_block tmp_bb = VEC_pop (basic_block, vec);
6027
6028 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
6029 if (single_succ_p (e->src)
6030 && !bitmap_bit_p (&bb_on_list, e->src->index)
6031 && can_duplicate_block_p (e->src))
6032 {
6033 edge pe;
6034 edge_iterator pei;
6035
6036 /* If there is a predecessor of e->src which doesn't
6037 need a prologue and the edge is complex,
6038 we might not be able to redirect the branch
6039 to a copy of e->src. */
6040 FOR_EACH_EDGE (pe, pei, e->src->preds)
6041 if ((pe->flags & EDGE_COMPLEX) != 0
6042 && !bitmap_bit_p (&bb_flags, pe->src->index))
6043 break;
6044 if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
6045 VEC_quick_push (basic_block, vec, e->src);
6046 }
6047 }
6048
6049 /* Now walk backwards from every block that is marked as needing
6050 a prologue to compute the bb_antic_flags bitmap. Exclude
6051 tail blocks; they can be duplicated to be used on paths not
6052 needing a prologue.
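   For instance, in the schematic CFG

	BB1 --> BB2 --> ... --> exit	(BB2 clobbers a call-saved reg)
	 \----> BB3 --> exit		(early-exit path, frame-free)

   BB2 and everything reachable from it end up in ANTIC, while BB1 and
   BB3 stay out, so the single candidate edge for the prologue becomes
   BB1->BB2 and the BB3 path executes without a prologue.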
*/ 6053 bitmap_clear (&bb_on_list);
6054 bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
6055 FOR_EACH_BB (bb)
6056 {
6057 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
6058 continue;
6059 FOR_EACH_EDGE (e, ei, bb->preds)
6060 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
6061 && bitmap_set_bit (&bb_on_list, e->src->index))
6062 VEC_quick_push (basic_block, vec, e->src);
6063 }
6064 while (!VEC_empty (basic_block, vec))
6065 {
6066 basic_block tmp_bb = VEC_pop (basic_block, vec);
6067 bool all_set = true;
6068
6069 bitmap_clear_bit (&bb_on_list, tmp_bb->index);
6070 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
6071 if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
6072 {
6073 all_set = false;
6074 break;
6075 }
6076
6077 if (all_set)
6078 {
6079 bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
6080 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
6081 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
6082 && bitmap_set_bit (&bb_on_list, e->src->index))
6083 VEC_quick_push (basic_block, vec, e->src);
6084 }
6085 }
6086 /* Find exactly one edge that leads to a block in ANTIC from
6087 a block that isn't. */
6088 if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
6089 FOR_EACH_BB (bb)
6090 {
6091 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
6092 continue;
6093 FOR_EACH_EDGE (e, ei, bb->preds)
6094 if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
6095 {
6096 if (entry_edge != orig_entry_edge)
6097 {
6098 entry_edge = orig_entry_edge;
6099 if (dump_file)
6100 fprintf (dump_file, "More than one candidate edge.\n");
6101 goto fail_shrinkwrap;
6102 }
6103 if (dump_file)
6104 fprintf (dump_file, "Found candidate edge for "
6105 "shrink-wrapping, %d->%d.\n", e->src->index,
6106 e->dest->index);
6107 entry_edge = e;
6108 }
6109 }
6110
6111 if (entry_edge != orig_entry_edge)
6112 {
6113 /* Test whether the prologue is known to clobber any registers
6114 (other than FP or SP) which are live on the edge. */
6115 CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
6116 if (frame_pointer_needed)
6117 CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
6118 CLEAR_HARD_REG_SET (live_on_edge);
6119 reg_set_to_hard_reg_set (&live_on_edge,
6120 df_get_live_in (entry_edge->dest));
6121 if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
6122 {
6123 entry_edge = orig_entry_edge;
6124 if (dump_file)
6125 fprintf (dump_file,
6126 "Shrink-wrapping aborted due to clobber.\n");
6127 }
6128 }
6129 if (entry_edge != orig_entry_edge)
6130 {
6131 crtl->shrink_wrapped = true;
6132 if (dump_file)
6133 fprintf (dump_file, "Performing shrink-wrapping.\n");
6134
6135 /* Find tail blocks reachable from both blocks needing a
6136 prologue and blocks not needing a prologue. */
6137 if (!bitmap_empty_p (&bb_tail))
6138 FOR_EACH_BB (bb)
6139 {
6140 bool some_pro, some_no_pro;
6141 if (!bitmap_bit_p (&bb_tail, bb->index))
6142 continue;
6143 some_pro = some_no_pro = false;
6144 FOR_EACH_EDGE (e, ei, bb->preds)
6145 {
6146 if (bitmap_bit_p (&bb_flags, e->src->index))
6147 some_pro = true;
6148 else
6149 some_no_pro = true;
6150 }
6151 if (some_pro && some_no_pro)
6152 VEC_quick_push (basic_block, vec, bb);
6153 else
6154 bitmap_clear_bit (&bb_tail, bb->index);
6155 }
6156 /* Find the head of each tail.
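   That is, for a surviving tail chain BB5 -> BB6 -> exit, only the
   head BB5 is kept in BB_TAIL; the walk below clears BB6, so the
   duplication loop that follows copies each chain exactly once,
   starting from its head.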
*/ 6157 while (!VEC_empty (basic_block, vec)) 6158 { 6159 basic_block tbb = VEC_pop (basic_block, vec); 6160 6161 if (!bitmap_bit_p (&bb_tail, tbb->index)) 6162 continue; 6163 6164 while (single_succ_p (tbb)) 6165 { 6166 tbb = single_succ (tbb); 6167 bitmap_clear_bit (&bb_tail, tbb->index); 6168 } 6169 } 6170 /* Now duplicate the tails. */ 6171 if (!bitmap_empty_p (&bb_tail)) 6172 FOR_EACH_BB_REVERSE (bb) 6173 { 6174 basic_block copy_bb, tbb; 6175 rtx insert_point; 6176 int eflags; 6177 6178 if (!bitmap_clear_bit (&bb_tail, bb->index)) 6179 continue; 6180 6181 /* Create a copy of BB, instructions and all, for 6182 use on paths that don't need a prologue. 6183 Ideal placement of the copy is on a fall-thru edge 6184 or after a block that would jump to the copy. */ 6185 FOR_EACH_EDGE (e, ei, bb->preds) 6186 if (!bitmap_bit_p (&bb_flags, e->src->index) 6187 && single_succ_p (e->src)) 6188 break; 6189 if (e) 6190 { 6191 copy_bb = create_basic_block (NEXT_INSN (BB_END (e->src)), 6192 NULL_RTX, e->src); 6193 BB_COPY_PARTITION (copy_bb, e->src); 6194 } 6195 else 6196 { 6197 /* Otherwise put the copy at the end of the function. */ 6198 copy_bb = create_basic_block (NULL_RTX, NULL_RTX, 6199 EXIT_BLOCK_PTR->prev_bb); 6200 BB_COPY_PARTITION (copy_bb, bb); 6201 } 6202 6203 insert_point = emit_note_after (NOTE_INSN_DELETED, 6204 BB_END (copy_bb)); 6205 emit_barrier_after (BB_END (copy_bb)); 6206 6207 tbb = bb; 6208 while (1) 6209 { 6210 dup_block_and_redirect (tbb, copy_bb, insert_point, 6211 &bb_flags); 6212 tbb = single_succ (tbb); 6213 if (tbb == EXIT_BLOCK_PTR) 6214 break; 6215 e = split_block (copy_bb, PREV_INSN (insert_point)); 6216 copy_bb = e->dest; 6217 } 6218 6219 /* Quiet verify_flow_info by (ab)using EDGE_FAKE. 6220 We have yet to add a simple_return to the tails, 6221 as we'd like to first convert_jumps_to_returns in 6222 case the block is no longer used after that. */ 6223 eflags = EDGE_FAKE; 6224 if (CALL_P (PREV_INSN (insert_point)) 6225 && SIBLING_CALL_P (PREV_INSN (insert_point))) 6226 eflags = EDGE_SIBCALL | EDGE_ABNORMAL; 6227 make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags); 6228 6229 /* verify_flow_info doesn't like a note after a 6230 sibling call. */ 6231 delete_insn (insert_point); 6232 if (bitmap_empty_p (&bb_tail)) 6233 break; 6234 } 6235 } 6236 6237 fail_shrinkwrap: 6238 bitmap_clear (&bb_tail); 6239 bitmap_clear (&bb_antic_flags); 6240 bitmap_clear (&bb_on_list); 6241 VEC_free (basic_block, heap, vec); 6242 } 6243 #endif 6244 6245 if (split_prologue_seq != NULL_RTX) 6246 { 6247 insert_insn_on_edge (split_prologue_seq, orig_entry_edge); 6248 inserted = true; 6249 } 6250 if (prologue_seq != NULL_RTX) 6251 { 6252 insert_insn_on_edge (prologue_seq, entry_edge); 6253 inserted = true; 6254 } 6255 6256 /* If the exit block has no non-fake predecessors, we don't need 6257 an epilogue. */ 6258 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 6259 if ((e->flags & EDGE_FAKE) == 0) 6260 break; 6261 if (e == NULL) 6262 goto epilogue_done; 6263 6264 rtl_profile_for_bb (EXIT_BLOCK_PTR); 6265 6266 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds); 6267 6268 /* If we're allowed to generate a simple return instruction, then by 6269 definition we don't need a full epilogue. If the last basic 6270 block before the exit block does not contain active instructions, 6271 examine its predecessors and try to emit (conditional) return 6272 instructions. 
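   For a conditional jump to LAST_BB, redirect_jump rewrites the
   branch target in place; schematically (illustrative RTL, not from
   any particular port):

	(set (pc) (if_then_else (eq (reg:CC 17) (const_int 0))
				(label_ref L)
				(pc)))

   becomes the same pattern with (return), or (simple_return), in
   place of (label_ref L).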
*/ 6273 #ifdef HAVE_simple_return 6274 if (entry_edge != orig_entry_edge) 6275 { 6276 if (optimize) 6277 { 6278 unsigned i, last; 6279 6280 /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds 6281 (but won't remove). Stop at end of current preds. */ 6282 last = EDGE_COUNT (EXIT_BLOCK_PTR->preds); 6283 for (i = 0; i < last; i++) 6284 { 6285 e = EDGE_I (EXIT_BLOCK_PTR->preds, i); 6286 if (LABEL_P (BB_HEAD (e->src)) 6287 && !bitmap_bit_p (&bb_flags, e->src->index) 6288 && !active_insn_between (BB_HEAD (e->src), BB_END (e->src))) 6289 unconverted_simple_returns 6290 = convert_jumps_to_returns (e->src, true, 6291 unconverted_simple_returns); 6292 } 6293 } 6294 6295 if (exit_fallthru_edge != NULL 6296 && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0 6297 && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index)) 6298 { 6299 basic_block last_bb; 6300 6301 last_bb = emit_return_for_exit (exit_fallthru_edge, true); 6302 returnjump = BB_END (last_bb); 6303 exit_fallthru_edge = NULL; 6304 } 6305 } 6306 #endif 6307 #ifdef HAVE_return 6308 if (HAVE_return) 6309 { 6310 if (exit_fallthru_edge == NULL) 6311 goto epilogue_done; 6312 6313 if (optimize) 6314 { 6315 basic_block last_bb = exit_fallthru_edge->src; 6316 6317 if (LABEL_P (BB_HEAD (last_bb)) 6318 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb))) 6319 convert_jumps_to_returns (last_bb, false, NULL); 6320 6321 if (EDGE_COUNT (last_bb->preds) != 0 6322 && single_succ_p (last_bb)) 6323 { 6324 last_bb = emit_return_for_exit (exit_fallthru_edge, false); 6325 epilogue_end = returnjump = BB_END (last_bb); 6326 #ifdef HAVE_simple_return 6327 /* Emitting the return may add a basic block. 6328 Fix bb_flags for the added block. */ 6329 if (last_bb != exit_fallthru_edge->src) 6330 bitmap_set_bit (&bb_flags, last_bb->index); 6331 #endif 6332 goto epilogue_done; 6333 } 6334 } 6335 } 6336 #endif 6337 6338 /* A small fib -- epilogue is not yet completed, but we wish to re-use 6339 this marker for the splits of EH_RETURN patterns, and nothing else 6340 uses the flag in the meantime. */ 6341 epilogue_completed = 1; 6342 6343 #ifdef HAVE_eh_return 6344 /* Find non-fallthru edges that end with EH_RETURN instructions. On 6345 some targets, these get split to a special version of the epilogue 6346 code. In order to be able to properly annotate these with unwind 6347 info, try to split them now. If we get a valid split, drop an 6348 EPILOGUE_BEG note and mark the insns as epilogue insns. */ 6349 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 6350 { 6351 rtx prev, last, trial; 6352 6353 if (e->flags & EDGE_FALLTHRU) 6354 continue; 6355 last = BB_END (e->src); 6356 if (!eh_returnjump_p (last)) 6357 continue; 6358 6359 prev = PREV_INSN (last); 6360 trial = try_split (PATTERN (last), last, 1); 6361 if (trial == last) 6362 continue; 6363 6364 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash); 6365 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev); 6366 } 6367 #endif 6368 6369 /* If nothing falls through into the exit block, we don't need an 6370 epilogue. */ 6371 6372 if (exit_fallthru_edge == NULL) 6373 goto epilogue_done; 6374 6375 #ifdef HAVE_epilogue 6376 if (HAVE_epilogue) 6377 { 6378 start_sequence (); 6379 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG); 6380 seq = gen_epilogue (); 6381 if (seq) 6382 emit_jump_insn (seq); 6383 6384 /* Retain a map of the epilogue insns. 
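   The recorded insns are what prologue_epilogue_contains and
   reposition_prologue_and_epilogue_notes consult later, which is why
   record_insns runs below before the sequence is inserted on the
   exit fallthru edge.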
*/ 6385 record_insns (seq, NULL, &epilogue_insn_hash); 6386 set_insn_locators (seq, epilogue_locator); 6387 6388 seq = get_insns (); 6389 returnjump = get_last_insn (); 6390 end_sequence (); 6391 6392 insert_insn_on_edge (seq, exit_fallthru_edge); 6393 inserted = true; 6394 6395 if (JUMP_P (returnjump)) 6396 set_return_jump_label (returnjump); 6397 } 6398 else 6399 #endif 6400 { 6401 basic_block cur_bb; 6402 6403 if (! next_active_insn (BB_END (exit_fallthru_edge->src))) 6404 goto epilogue_done; 6405 /* We have a fall-through edge to the exit block, the source is not 6406 at the end of the function, and there will be an assembler epilogue 6407 at the end of the function. 6408 We can't use force_nonfallthru here, because that would try to 6409 use return. Inserting a jump 'by hand' is extremely messy, so 6410 we take advantage of cfg_layout_finalize using 6411 fixup_fallthru_exit_predecessor. */ 6412 cfg_layout_initialize (0); 6413 FOR_EACH_BB (cur_bb) 6414 if (cur_bb->index >= NUM_FIXED_BLOCKS 6415 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS) 6416 cur_bb->aux = cur_bb->next_bb; 6417 cfg_layout_finalize (); 6418 } 6419 6420 epilogue_done: 6421 6422 default_rtl_profile (); 6423 6424 if (inserted) 6425 { 6426 sbitmap blocks; 6427 6428 commit_edge_insertions (); 6429 6430 /* Look for basic blocks within the prologue insns. */ 6431 blocks = sbitmap_alloc (last_basic_block); 6432 sbitmap_zero (blocks); 6433 SET_BIT (blocks, entry_edge->dest->index); 6434 SET_BIT (blocks, orig_entry_edge->dest->index); 6435 find_many_sub_basic_blocks (blocks); 6436 sbitmap_free (blocks); 6437 6438 /* The epilogue insns we inserted may cause the exit edge to no longer 6439 be fallthru. */ 6440 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 6441 { 6442 if (((e->flags & EDGE_FALLTHRU) != 0) 6443 && returnjump_p (BB_END (e->src))) 6444 e->flags &= ~EDGE_FALLTHRU; 6445 } 6446 } 6447 6448 #ifdef HAVE_simple_return 6449 /* If there were branches to an empty LAST_BB which we tried to 6450 convert to conditional simple_returns, but couldn't for some 6451 reason, create a block to hold a simple_return insn and redirect 6452 those remaining edges. */ 6453 if (!VEC_empty (edge, unconverted_simple_returns)) 6454 { 6455 basic_block simple_return_block_hot = NULL; 6456 basic_block simple_return_block_cold = NULL; 6457 edge pending_edge_hot = NULL; 6458 edge pending_edge_cold = NULL; 6459 basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb; 6460 int i; 6461 6462 gcc_assert (entry_edge != orig_entry_edge); 6463 6464 /* See if we can reuse the last insn that was emitted for the 6465 epilogue. */ 6466 if (returnjump != NULL_RTX 6467 && JUMP_LABEL (returnjump) == simple_return_rtx) 6468 { 6469 e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump)); 6470 if (BB_PARTITION (e->src) == BB_HOT_PARTITION) 6471 simple_return_block_hot = e->dest; 6472 else 6473 simple_return_block_cold = e->dest; 6474 } 6475 6476 /* Also check returns we might need to add to tail blocks. 
*/ 6477 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 6478 if (EDGE_COUNT (e->src->preds) != 0 6479 && (e->flags & EDGE_FAKE) != 0 6480 && !bitmap_bit_p (&bb_flags, e->src->index)) 6481 { 6482 if (BB_PARTITION (e->src) == BB_HOT_PARTITION) 6483 pending_edge_hot = e; 6484 else 6485 pending_edge_cold = e; 6486 } 6487 6488 FOR_EACH_VEC_ELT (edge, unconverted_simple_returns, i, e) 6489 { 6490 basic_block *pdest_bb; 6491 edge pending; 6492 6493 if (BB_PARTITION (e->src) == BB_HOT_PARTITION) 6494 { 6495 pdest_bb = &simple_return_block_hot; 6496 pending = pending_edge_hot; 6497 } 6498 else 6499 { 6500 pdest_bb = &simple_return_block_cold; 6501 pending = pending_edge_cold; 6502 } 6503 6504 if (*pdest_bb == NULL && pending != NULL) 6505 { 6506 emit_return_into_block (true, pending->src); 6507 pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE); 6508 *pdest_bb = pending->src; 6509 } 6510 else if (*pdest_bb == NULL) 6511 { 6512 basic_block bb; 6513 rtx start; 6514 6515 bb = create_basic_block (NULL, NULL, exit_pred); 6516 BB_COPY_PARTITION (bb, e->src); 6517 start = emit_jump_insn_after (gen_simple_return (), 6518 BB_END (bb)); 6519 JUMP_LABEL (start) = simple_return_rtx; 6520 emit_barrier_after (start); 6521 6522 *pdest_bb = bb; 6523 make_edge (bb, EXIT_BLOCK_PTR, 0); 6524 } 6525 redirect_edge_and_branch_force (e, *pdest_bb); 6526 } 6527 VEC_free (edge, heap, unconverted_simple_returns); 6528 } 6529 6530 if (entry_edge != orig_entry_edge) 6531 { 6532 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 6533 if (EDGE_COUNT (e->src->preds) != 0 6534 && (e->flags & EDGE_FAKE) != 0 6535 && !bitmap_bit_p (&bb_flags, e->src->index)) 6536 { 6537 emit_return_into_block (true, e->src); 6538 e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE); 6539 } 6540 } 6541 #endif 6542 6543 #ifdef HAVE_sibcall_epilogue 6544 /* Emit sibling epilogues before any sibling call sites. */ 6545 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); ) 6546 { 6547 basic_block bb = e->src; 6548 rtx insn = BB_END (bb); 6549 rtx ep_seq; 6550 6551 if (!CALL_P (insn) 6552 || ! SIBLING_CALL_P (insn) 6553 #ifdef HAVE_simple_return 6554 || (entry_edge != orig_entry_edge 6555 && !bitmap_bit_p (&bb_flags, bb->index)) 6556 #endif 6557 ) 6558 { 6559 ei_next (&ei); 6560 continue; 6561 } 6562 6563 ep_seq = gen_sibcall_epilogue (); 6564 if (ep_seq) 6565 { 6566 start_sequence (); 6567 emit_note (NOTE_INSN_EPILOGUE_BEG); 6568 emit_insn (ep_seq); 6569 seq = get_insns (); 6570 end_sequence (); 6571 6572 /* Retain a map of the epilogue insns. Used in life analysis to 6573 avoid getting rid of sibcall epilogue insns. Do this before we 6574 actually emit the sequence. */ 6575 record_insns (seq, NULL, &epilogue_insn_hash); 6576 set_insn_locators (seq, epilogue_locator); 6577 6578 emit_insn_before (seq, insn); 6579 } 6580 ei_next (&ei); 6581 } 6582 #endif 6583 6584 #ifdef HAVE_epilogue 6585 if (epilogue_end) 6586 { 6587 rtx insn, next; 6588 6589 /* Similarly, move any line notes that appear after the epilogue. 6590 There is no need, however, to be quite so anal about the existence 6591 of such a note. Also possibly move 6592 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug 6593 info generation. 
*/ 6594 for (insn = epilogue_end; insn; insn = next) 6595 { 6596 next = NEXT_INSN (insn); 6597 if (NOTE_P (insn) 6598 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)) 6599 reorder_insns (insn, insn, PREV_INSN (epilogue_end)); 6600 } 6601 } 6602 #endif 6603 6604 #ifdef HAVE_simple_return 6605 bitmap_clear (&bb_flags); 6606 #endif 6607 6608 /* Threading the prologue and epilogue changes the artificial refs 6609 in the entry and exit blocks. */ 6610 epilogue_completed = 1; 6611 df_update_entry_exit_and_calls (); 6612 } 6613 6614 /* Reposition the prologue-end and epilogue-begin notes after 6615 instruction scheduling. */ 6616 6617 void 6618 reposition_prologue_and_epilogue_notes (void) 6619 { 6620 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \ 6621 || defined (HAVE_sibcall_epilogue) 6622 /* Since the hash table is created on demand, the fact that it is 6623 non-null is a signal that it is non-empty. */ 6624 if (prologue_insn_hash != NULL) 6625 { 6626 size_t len = htab_elements (prologue_insn_hash); 6627 rtx insn, last = NULL, note = NULL; 6628 6629 /* Scan from the beginning until we reach the last prologue insn. */ 6630 /* ??? While we do have the CFG intact, there are two problems: 6631 (1) The prologue can contain loops (typically probing the stack), 6632 which means that the end of the prologue isn't in the first bb. 6633 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */ 6634 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) 6635 { 6636 if (NOTE_P (insn)) 6637 { 6638 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END) 6639 note = insn; 6640 } 6641 else if (contains (insn, prologue_insn_hash)) 6642 { 6643 last = insn; 6644 if (--len == 0) 6645 break; 6646 } 6647 } 6648 6649 if (last) 6650 { 6651 if (note == NULL) 6652 { 6653 /* Scan forward looking for the PROLOGUE_END note. It should 6654 be right at the beginning of the block, possibly with other 6655 insn notes that got moved there. */ 6656 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note)) 6657 { 6658 if (NOTE_P (note) 6659 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END) 6660 break; 6661 } 6662 } 6663 6664 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */ 6665 if (LABEL_P (last)) 6666 last = NEXT_INSN (last); 6667 reorder_insns (note, note, last); 6668 } 6669 } 6670 6671 if (epilogue_insn_hash != NULL) 6672 { 6673 edge_iterator ei; 6674 edge e; 6675 6676 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) 6677 { 6678 rtx insn, first = NULL, note = NULL; 6679 basic_block bb = e->src; 6680 6681 /* Scan from the beginning until we reach the first epilogue insn. */ 6682 FOR_BB_INSNS (bb, insn) 6683 { 6684 if (NOTE_P (insn)) 6685 { 6686 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG) 6687 { 6688 note = insn; 6689 if (first != NULL) 6690 break; 6691 } 6692 } 6693 else if (first == NULL && contains (insn, epilogue_insn_hash)) 6694 { 6695 first = insn; 6696 if (note != NULL) 6697 break; 6698 } 6699 } 6700 6701 if (note) 6702 { 6703 /* If the function has a single basic block, and no real 6704 epilogue insns (e.g. sibcall with no cleanup), the 6705 epilogue note can get scheduled before the prologue 6706 note. If we have frame related prologue insns, having 6707 them scanned during the epilogue will result in a crash. 6708 In this case re-order the epilogue note to just before 6709 the last insn in the block. 
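   Concretely: when the scan below finds no insn from the epilogue
   hash in the block, FIRST is set to BB_END (bb), so the note is
   re-ordered to sit immediately before the block's final insn instead
   of ahead of any frame-related prologue insns.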
*/ 6710 if (first == NULL)
6711 first = BB_END (bb);
6712
6713 if (PREV_INSN (first) != note)
6714 reorder_insns (note, note, PREV_INSN (first));
6715 }
6716 }
6717 }
6718 #endif /* HAVE_prologue or HAVE_epilogue */
6719 }
6720
6721 /* Returns the name of the current function. */
6722 const char *
6723 current_function_name (void)
6724 {
6725 if (cfun == NULL)
6726 return "<none>";
6727 return lang_hooks.decl_printable_name (cfun->decl, 2);
6728 }
6729
6730
6731 static unsigned int
6732 rest_of_handle_check_leaf_regs (void)
6733 {
6734 #ifdef LEAF_REGISTERS
6735 current_function_uses_only_leaf_regs
6736 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6737 #endif
6738 return 0;
6739 }
6740
6741 /* Insert a TYPE into the used types hash table of CFUN. */
6742
6743 static void
6744 used_types_insert_helper (tree type, struct function *func)
6745 {
6746 if (type != NULL && func != NULL)
6747 {
6748 void **slot;
6749
6750 if (func->used_types_hash == NULL)
6751 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
6752 htab_eq_pointer, NULL);
6753 slot = htab_find_slot (func->used_types_hash, type, INSERT);
6754 if (*slot == NULL)
6755 *slot = type;
6756 }
6757 }
6758
6759 /* Given a type, insert it into the used types hash table in cfun. */
6760 void
6761 used_types_insert (tree t)
6762 {
6763 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6764 if (TYPE_NAME (t))
6765 break;
6766 else
6767 t = TREE_TYPE (t);
6768 if (TREE_CODE (t) == ERROR_MARK)
6769 return;
6770 if (TYPE_NAME (t) == NULL_TREE
6771 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6772 t = TYPE_MAIN_VARIANT (t);
6773 if (debug_info_level > DINFO_LEVEL_NONE)
6774 {
6775 if (cfun)
6776 used_types_insert_helper (t, cfun);
6777 else
6778 /* So this might be a type referenced by a global variable.
6779 Record that type so that we can later decide to emit its debug
6780 information. */
6781 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
6782 }
6783 }
6784
6785 /* Helper to hash a struct types_used_by_vars_entry. */
6786
6787 static hashval_t
6788 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6789 {
6790 gcc_assert (entry && entry->var_decl && entry->type);
6791
6792 return iterative_hash_object (entry->type,
6793 iterative_hash_object (entry->var_decl, 0));
6794 }
6795
6796 /* Hash function of the types_used_by_vars_entry hash table. */
6797
6798 hashval_t
6799 types_used_by_vars_do_hash (const void *x)
6800 {
6801 const struct types_used_by_vars_entry *entry =
6802 (const struct types_used_by_vars_entry *) x;
6803
6804 return hash_types_used_by_vars_entry (entry);
6805 }
6806
6807 /* Equality function of the types_used_by_vars_entry hash table. */
6808
6809 int
6810 types_used_by_vars_eq (const void *x1, const void *x2)
6811 {
6812 const struct types_used_by_vars_entry *e1 =
6813 (const struct types_used_by_vars_entry *) x1;
6814 const struct types_used_by_vars_entry *e2 =
6815 (const struct types_used_by_vars_entry *) x2;
6816
6817 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6818 }
6819
6820 /* Inserts an entry into the types_used_by_vars_hash hash table.
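   For illustration only (not an actual call site in GCC): a front end
   recording that a global variable's type may need debug information
   would do something like

	types_used_by_var_decl_insert (TREE_TYPE (decl), decl);

   duplicate (type, var_decl) pairs are weeded out by the hash lookup
   in the function below.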
*/ 6821 6822 void 6823 types_used_by_var_decl_insert (tree type, tree var_decl) 6824 { 6825 if (type != NULL && var_decl != NULL) 6826 { 6827 void **slot; 6828 struct types_used_by_vars_entry e; 6829 e.var_decl = var_decl; 6830 e.type = type; 6831 if (types_used_by_vars_hash == NULL) 6832 types_used_by_vars_hash = 6833 htab_create_ggc (37, types_used_by_vars_do_hash, 6834 types_used_by_vars_eq, NULL); 6835 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e, 6836 hash_types_used_by_vars_entry (&e), INSERT); 6837 if (*slot == NULL) 6838 { 6839 struct types_used_by_vars_entry *entry; 6840 entry = ggc_alloc_types_used_by_vars_entry (); 6841 entry->type = type; 6842 entry->var_decl = var_decl; 6843 *slot = entry; 6844 } 6845 } 6846 } 6847 6848 struct rtl_opt_pass pass_leaf_regs = 6849 { 6850 { 6851 RTL_PASS, 6852 "*leaf_regs", /* name */ 6853 NULL, /* gate */ 6854 rest_of_handle_check_leaf_regs, /* execute */ 6855 NULL, /* sub */ 6856 NULL, /* next */ 6857 0, /* static_pass_number */ 6858 TV_NONE, /* tv_id */ 6859 0, /* properties_required */ 6860 0, /* properties_provided */ 6861 0, /* properties_destroyed */ 6862 0, /* todo_flags_start */ 6863 0 /* todo_flags_finish */ 6864 } 6865 }; 6866 6867 static unsigned int 6868 rest_of_handle_thread_prologue_and_epilogue (void) 6869 { 6870 if (optimize) 6871 cleanup_cfg (CLEANUP_EXPENSIVE); 6872 6873 /* On some machines, the prologue and epilogue code, or parts thereof, 6874 can be represented as RTL. Doing so lets us schedule insns between 6875 it and the rest of the code and also allows delayed branch 6876 scheduling to operate in the epilogue. */ 6877 thread_prologue_and_epilogue_insns (); 6878 6879 /* The stack usage info is finalized during prologue expansion. */ 6880 if (flag_stack_usage_info) 6881 output_stack_usage (); 6882 6883 return 0; 6884 } 6885 6886 struct rtl_opt_pass pass_thread_prologue_and_epilogue = 6887 { 6888 { 6889 RTL_PASS, 6890 "pro_and_epilogue", /* name */ 6891 NULL, /* gate */ 6892 rest_of_handle_thread_prologue_and_epilogue, /* execute */ 6893 NULL, /* sub */ 6894 NULL, /* next */ 6895 0, /* static_pass_number */ 6896 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */ 6897 0, /* properties_required */ 6898 0, /* properties_provided */ 6899 0, /* properties_destroyed */ 6900 TODO_verify_flow, /* todo_flags_start */ 6901 TODO_df_verify | 6902 TODO_df_finish | TODO_verify_rtl_sharing | 6903 TODO_ggc_collect /* todo_flags_finish */ 6904 } 6905 }; 6906 6907 6908 /* This mini-pass fixes fall-out from SSA in asm statements that have 6909 in-out constraints. Say you start with 6910 6911 orig = inout; 6912 asm ("": "+mr" (inout)); 6913 use (orig); 6914 6915 which is transformed very early to use explicit output and match operands: 6916 6917 orig = inout; 6918 asm ("": "=mr" (inout) : "0" (inout)); 6919 use (orig); 6920 6921 Or, after SSA and copyprop, 6922 6923 asm ("": "=mr" (inout_2) : "0" (inout_1)); 6924 use (inout_1); 6925 6926 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as 6927 they represent two separate values, so they will get different pseudo 6928 registers during expansion. Then, since the two operands need to match 6929 per the constraints, but use different pseudo registers, reload can 6930 only register a reload for these operands. But reloads can only be 6931 satisfied by hardregs, not by memory, so we need a register for this 6932 reload, just because we are presented with non-matching operands. 
6933 So, even though we allow memory for this operand, no memory can be 6934 used for it, just because the two operands don't match. This can 6935 cause reload failures on register-starved targets. 6936 6937 So it's a symptom of reload not being able to use memory for reloads 6938 or, alternatively it's also a symptom of both operands not coming into 6939 reload as matching (in which case the pseudo could go to memory just 6940 fine, as the alternative allows it, and no reload would be necessary). 6941 We fix the latter problem here, by transforming 6942 6943 asm ("": "=mr" (inout_2) : "0" (inout_1)); 6944 6945 back to 6946 6947 inout_2 = inout_1; 6948 asm ("": "=mr" (inout_2) : "0" (inout_2)); */ 6949 6950 static void 6951 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs) 6952 { 6953 int i; 6954 bool changed = false; 6955 rtx op = SET_SRC (p_sets[0]); 6956 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op); 6957 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op); 6958 bool *output_matched = XALLOCAVEC (bool, noutputs); 6959 6960 memset (output_matched, 0, noutputs * sizeof (bool)); 6961 for (i = 0; i < ninputs; i++) 6962 { 6963 rtx input, output, insns; 6964 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i); 6965 char *end; 6966 int match, j; 6967 6968 if (*constraint == '%') 6969 constraint++; 6970 6971 match = strtoul (constraint, &end, 10); 6972 if (end == constraint) 6973 continue; 6974 6975 gcc_assert (match < noutputs); 6976 output = SET_DEST (p_sets[match]); 6977 input = RTVEC_ELT (inputs, i); 6978 /* Only do the transformation for pseudos. */ 6979 if (! REG_P (output) 6980 || rtx_equal_p (output, input) 6981 || (GET_MODE (input) != VOIDmode 6982 && GET_MODE (input) != GET_MODE (output))) 6983 continue; 6984 6985 /* We can't do anything if the output is also used as input, 6986 as we're going to overwrite it. */ 6987 for (j = 0; j < ninputs; j++) 6988 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j))) 6989 break; 6990 if (j != ninputs) 6991 continue; 6992 6993 /* Avoid changing the same input several times. For 6994 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in)); 6995 only change in once (to out1), rather than changing it 6996 first to out1 and afterwards to out2. */ 6997 if (i > 0) 6998 { 6999 for (j = 0; j < noutputs; j++) 7000 if (output_matched[j] && input == SET_DEST (p_sets[j])) 7001 break; 7002 if (j != noutputs) 7003 continue; 7004 } 7005 output_matched[match] = true; 7006 7007 start_sequence (); 7008 emit_move_insn (output, input); 7009 insns = get_insns (); 7010 end_sequence (); 7011 emit_insn_before (insns, insn); 7012 7013 /* Now replace all mentions of the input with output. We can't 7014 just replace the occurrence in inputs[i], as the register might 7015 also be used in some other input (or even in an address of an 7016 output), which would mean possibly increasing the number of 7017 inputs by one (namely 'output' in addition), which might pose 7018 a too complicated problem for reload to solve. E.g. this situation: 7019 7020 asm ("" : "=r" (output), "=m" (input) : "0" (input)) 7021 7022 Here 'input' is used in two occurrences as input (once for the 7023 input operand, once for the address in the second output operand). 
7024 If we replaced only the occurrence of the input operand (to
7025 make the operands match) we would be left with this:
7026
7027 output = input
7028 asm ("" : "=r" (output), "=m" (input) : "0" (output))
7029
7030 Now we suddenly have two different input values (containing the same
7031 value, but different pseudos) where we formerly had only one.
7032 With more complicated asms this might lead to reload failures
7033 which wouldn't have happened without this pass. So, iterate over
7034 all operands and replace all occurrences of the register used. */
7035 for (j = 0; j < noutputs; j++)
7036 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
7037 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
7038 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
7039 input, output);
7040 for (j = 0; j < ninputs; j++)
7041 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
7042 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
7043 input, output);
7044
7045 changed = true;
7046 }
7047
7048 if (changed)
7049 df_insn_rescan (insn);
7050 }
7051
7052 static unsigned
7053 rest_of_match_asm_constraints (void)
7054 {
7055 basic_block bb;
7056 rtx insn, pat, *p_sets;
7057 int noutputs;
7058
7059 if (!crtl->has_asm_statement)
7060 return 0;
7061
7062 df_set_flags (DF_DEFER_INSN_RESCAN);
7063 FOR_EACH_BB (bb)
7064 {
7065 FOR_BB_INSNS (bb, insn)
7066 {
7067 if (!INSN_P (insn))
7068 continue;
7069
7070 pat = PATTERN (insn);
7071 if (GET_CODE (pat) == PARALLEL)
7072 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
7073 else if (GET_CODE (pat) == SET)
7074 p_sets = &PATTERN (insn), noutputs = 1;
7075 else
7076 continue;
7077
7078 if (GET_CODE (*p_sets) == SET
7079 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
7080 match_asm_constraints_1 (insn, p_sets, noutputs);
7081 }
7082 }
7083
7084 return TODO_df_finish;
7085 }
7086
7087 struct rtl_opt_pass pass_match_asm_constraints =
7088 {
7089 {
7090 RTL_PASS,
7091 "asmcons", /* name */
7092 NULL, /* gate */
7093 rest_of_match_asm_constraints, /* execute */
7094 NULL, /* sub */
7095 NULL, /* next */
7096 0, /* static_pass_number */
7097 TV_NONE, /* tv_id */
7098 0, /* properties_required */
7099 0, /* properties_provided */
7100 0, /* properties_destroyed */
7101 0, /* todo_flags_start */
7102 0 /* todo_flags_finish */
7103 }
7104 };
7105
7106
7107 #include "gt-function.h"
7108