/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
         delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
         insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
         purge_dead_edges, purge_all_dead_edges

   Functions not intended for generic use:
     - Infrastructure to determine quickly basic block for insn
         compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
         block_label, tidy_fallthru_edge, force_nonfallthru  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
#include "cfglayout.h"
#include "expr.h"
#include "target.h"
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"

static int can_delete_note_p (rtx);
static int can_delete_label_p (rtx);
static void commit_one_edge_insertion (edge, int);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (basic_block, FILE *, int);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (rtx note)
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
          || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (rtx label)
{
  return (!LABEL_PRESERVE_P (label)
          /* User declared labels must be preserved.  */
          && LABEL_NAME (label) == 0
          && !in_expr_list_p (forced_labels, label));
}

/* Delete INSN by patching it out.  Return the next insn.  */

rtx
delete_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be referenced via variables, the constant pool etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
        {
          const char *name = LABEL_NAME (insn);

          really_delete = false;
          PUT_CODE (insn, NOTE);
          NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
          NOTE_DELETED_LABEL_NAME (insn) = name;
        }

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn)
      && JUMP_LABEL (insn)
      && LABEL_P (JUMP_LABEL (insn)))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else
    {
      while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
             && LABEL_P (XEXP (note, 0)))
        {
          LABEL_NUSES (XEXP (note, 0))--;
          remove_note (insn, note);
        }
    }

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
        {
          rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

          /* When deleting code in bulk (e.g. removing many unreachable
             blocks) we can delete a label that's a target of the vector
             before deleting the vector itself.  */
          if (!NOTE_P (label))
            LABEL_NUSES (label)--;
        }
    }

  return next;
}

/* Like delete_insn but also purge dead edges from BB.  */

rtx
delete_insn_and_edges (rtx insn)
{
  rtx x;
  bool purge = false;

  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    purge = true;
  x = delete_insn (insn);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (insn));
  return x;
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
delete_insn_chain (rtx start, rtx finish)
{
  rtx next;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  */
  while (1)
    {
      next = NEXT_INSN (start);
      if (NOTE_P (start) && !can_delete_note_p (start))
        ;
      else
        next = delete_insn (start);

      if (start == finish)
        break;
      start = next;
    }
}
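
/* A minimal usage sketch (hypothetical, kept out of the build): walk the
   insn stream and delete every no-op move.  delete_insn returns the insn
   following the deleted one, so the walk never touches freed links.
   noop_move_p is the predicate from rtlanal.c.  */
#if 0
static void
delete_noop_moves_sketch (void)
{
  rtx insn = get_insns ();

  while (insn)
    {
      if (INSN_P (insn) && noop_move_p (insn))
        insn = delete_insn (insn);
      else
        insn = NEXT_INSN (insn);
    }
}
#endif
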
/* Like delete_insn_chain but also purge dead edges from BB.  */

void
delete_insn_chain_and_edges (rtx first, rtx last)
{
  bool purge = false;

  if (INSN_P (last)
      && BLOCK_FOR_INSN (last)
      && BB_END (BLOCK_FOR_INSN (last)) == last)
    purge = true;
  delete_insn_chain (first, last);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (last));
}

/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - it
   reuses the note and basic block struct in BB_NOTE, if any, does not grow
   the BASIC_BLOCK chain, and should be used directly only by CFG construction
   code.  END can be NULL to create a new empty basic block before HEAD.
   Both END and HEAD can be NULL to create a basic block at the end of the
   INSN chain.  AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (LABEL_P (head))
        after = head;
      else
        {
          after = PREV_INSN (head);
          head = bb_note;
        }

      if (after != bb_note && NEXT_INSN (after) != bb_note)
        reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
        head = end = bb_note
          = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
        {
          bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
          if (head == end)
            end = bb_note;
        }
      else
        {
          bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
          head = bb_note;
          if (!end)
            end = head;
        }

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK (bb->index, bb);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

/* Create a new basic block consisting of the instructions between HEAD and END
   and place it in the BB chain after block AFTER.  END can be NULL to create a
   new empty basic block before HEAD.  Both END and HEAD can be NULL to create
   a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx head = headp, end = endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
    {
      size_t old_size = VEC_length (basic_block, basic_block_info);
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      basic_block *p;
      VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
      p = VEC_address (basic_block, basic_block_info);
      memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
    }

  n_basic_blocks++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx insn, end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);
  if (LABEL_P (insn))
    maybe_remove_eh_handler (insn);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end);
  if (b->il.rtl->global_live_at_start)
    {
      FREE_REG_SET (b->il.rtl->global_live_at_start);
      FREE_REG_SET (b->il.rtl->global_live_at_end);
      b->il.rtl->global_live_at_start = NULL;
      b->il.rtl->global_live_at_end = NULL;
    }
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end = BB_END (bb);
      rtx insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
        {
          BLOCK_FOR_INSN (insn) = bb;
          if (insn == end)
            break;
        }
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

struct tree_opt_pass pass_free_cfg =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  free_bb_for_insn,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfg,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

/* Return the insn after which new code should be emitted when inserting
   at the entry of the function.  */

rtx
entry_of_function (void)
{
  return (n_basic_blocks > NUM_FIXED_BLOCKS ?
          BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
}
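
/* A minimal usage sketch (hypothetical, kept out of the build): emit INSN
   at the start of the function body, using entry_of_function as the
   emission point.  emit_insn_at_entry below achieves the same effect via
   the edge-insertion machinery, which also keeps the CFG up to date.  */
#if 0
static void
emit_at_entry_sketch (rtx insn)
{
  emit_insn_after (insn, entry_of_function ());
}
#endif
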
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */

void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

/* Update BLOCK_FOR_INSN for the insns within BB.  */

void
update_bb_for_insn (basic_block bb)
{
  rtx insn;

  for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
    {
      if (!BARRIER_P (insn))
        set_block_for_insn (insn, bb);
      if (insn == BB_END (bb))
        break;
    }
}

/* Creates a new basic block just after basic block BB by splitting
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
        insn = PREV_INSN (insn);
      else
        insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  if (bb->il.rtl->global_live_at_start)
    {
      new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
                    bb->il.rtl->global_live_at_end);

      /* We now have to calculate which registers are live at the end
         of the split basic block and at the start of the new basic
         block.  Start with those registers that are known to be live
         at the end of the original basic block and get
         propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
                    bb->il.rtl->global_live_at_end);
      propagate_block (new_bb, new_bb->il.rtl->global_live_at_start,
                       NULL, NULL, 0);
      COPY_REG_SET (bb->il.rtl->global_live_at_end,
                    new_bb->il.rtl->global_live_at_start);
#ifdef HAVE_conditional_execution
      /* In the presence of conditional execution we are not able to update
         liveness precisely.  */
      if (reload_completed)
        {
          bb->flags |= BB_DIRTY;
          new_bb->flags |= BB_DIRTY;
        }
#endif
    }

  return new_bb;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* This might have been an EH label that no longer has incoming
         EH edges.  Update data structures to match.  */
      maybe_remove_eh_handler (b_head);

      /* Detect basic blocks with nothing but a label.  This can happen
         in particular at the end of a function.  */
      if (b_head == b_end)
        b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
        b_empty = 1;
      if (! del_last)
        del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
        if (!NOTE_P (prev)
            || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
            || prev == BB_HEAD (a))
          break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
        {
          rtx tmp = prev;

          prev = prev_nonnote_insn (prev);
          if (!prev)
            prev = BB_HEAD (a);
          del_first = tmp;
        }
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (del_first, del_last);

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      rtx x;

      for (x = a_end; x != b_end; x = NEXT_INSN (x))
        set_block_for_insn (x, a);

      set_block_for_insn (b_end, a);

      a_end = b_end;
    }

  BB_END (a) = a_end;
  a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
}

/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
          && single_succ (a) == b
          && single_pred_p (b)
          && a != b
          /* Must be simple edge.  */
          && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
          && a->next_bb == b
          && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
          /* If the jump insn has side effects,
             we can't kill the edge.  */
          && (!JUMP_P (BB_END (a))
              || (reload_completed
                  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */

rtx
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR)
    return NULL_RTX;

  if (!LABEL_P (BB_HEAD (block)))
    BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));

  return BB_HEAD (block);
}
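
/* A minimal usage sketch (hypothetical, kept out of the build): retarget
   the simple jump ending SRC at TARGET, creating TARGET's label on demand
   via block_label.  The redirection helpers below do this and much more.  */
#if 0
static bool
retarget_simplejump_sketch (basic_block src, basic_block target)
{
  rtx jump = BB_END (src);

  if (target == EXIT_BLOCK_PTR || !JUMP_P (jump) || !simplejump_p (jump))
    return false;
  return redirect_jump (jump, block_label (target), 0) != 0;
}
#endif
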
/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction by an unconditional jump, or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
      || BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
         edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
          && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
        fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
        {
          rtx insn = src->il.rtl->footer;

          delete_insn_chain (kill_from, BB_END (src));

          /* Remove barriers but keep jumptables.  */
          while (insn)
            {
              if (BARRIER_P (insn))
                {
                  if (PREV_INSN (insn))
                    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
                  else
                    src->il.rtl->footer = NEXT_INSN (insn);
                  if (NEXT_INSN (insn))
                    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
                }
              if (LABEL_P (insn))
                break;
              insn = NEXT_INSN (insn);
            }
        }
      else
        delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
        return NULL;
      if (dump_file)
        fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
                 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
        {
          gcc_assert (target == EXIT_BLOCK_PTR);
          return NULL;
        }
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
        fprintf (dump_file, "Replacing insn %i by jump %i\n",
                 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (kill_from, insn);

      /* Recognize a tablejump that we are converting to a
         simple jump and remove its associated CODE_LABEL
         and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
        delete_insn_chain (label, table);

      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
        emit_barrier_after (BB_END (src));
      else
        {
          if (barrier != NEXT_INSN (BB_END (src)))
            {
              /* Move the jump before barrier so that the notes
                 which originally were or were created before jump table are
                 inside the basic block.  */
              rtx new_insn = BB_END (src);
              rtx tmp;

              for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
                   tmp = NEXT_INSN (tmp))
                set_block_for_insn (tmp, src);

              NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
              PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

              NEXT_INSN (new_insn) = barrier;
              NEXT_INSN (PREV_INSN (barrier)) = new_insn;

              PREV_INSN (new_insn) = PREV_INSN (barrier);
              PREV_INSN (barrier) = new_insn;
            }
        }
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  /* We don't want a block to end on a line-number note since that has
     the potential of changing the code between -g and not -g.  */
  while (NOTE_P (BB_END (e->src))
         && NOTE_LINE_NUMBER (BB_END (e->src)) >= 0)
    delete_insn (BB_END (e->src));

  if (e->dest != target)
    redirect_edge_succ (e, target);

  return e;
}

/* Redirect the edge representing the branch of an (un)conditional jump or
   tablejump; return NULL on failure.  */

static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx tmp;
  rtx old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn))
    return NULL;

  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (target == EXIT_BLOCK_PTR)
        return NULL;
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
        vec = XVEC (PATTERN (tmp), 0);
      else
        vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
        if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
          {
            RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
            --LABEL_NUSES (old_label);
            ++LABEL_NUSES (new_label);
          }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
          && SET_DEST (tmp) == pc_rtx
          && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
          && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
          && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
        {
          XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
                                                       new_label);
          --LABEL_NUSES (old_label);
          ++LABEL_NUSES (new_label);
        }
    }
  else
    {
      /* ??? We may play the games with moving the named labels from
         one basic block to the other in case only one computed_jump is
         available.  */
      if (computed_jump_p (insn)
          /* A return instruction can't be redirected.  */
          || returnjump_p (insn))
        return NULL;

      /* If the insn doesn't go where we think, we're confused.  */
      gcc_assert (JUMP_LABEL (insn) == old_label);

      /* If the substitution doesn't succeed, die.  This can happen
         if the back end emitted unrecognizable instructions or if
         target is exit block on some arches.  */
      if (!redirect_jump (insn, block_label (target), 0))
        {
          gcc_assert (target == EXIT_BLOCK_PTR);
          return NULL;
        }
    }

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
             e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);
  return e;
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do that at
   the expense of adding new instructions or reordering basic blocks.

   The function can also be called with an edge destination equivalent to
   TARGET.  Then it should try the simplifications and do nothing if none
   is possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.
   We still return NULL if E already led to TARGET and we did not manage to
   simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      src->flags |= BB_DIRTY;
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  src->flags |= BB_DIRTY;
  return ret;
}

/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  */

static basic_block
force_nonfallthru_and_redirect (edge e, basic_block target)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;

  /* In the case the last instruction is a conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect the fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
        {
          int prob = INTVAL (XEXP (note, 0));

          b->probability = prob;
          b->count = e->count * prob / REG_BR_PROB_BASE;
          e->probability -= e->probability;
          e->count -= b->count;
          if (e->probability < 0)
            e->probability = 0;
          if (e->count < 0)
            e->count = 0;
        }
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
         edge.
         We can't redirect abnormal edge, but we still can split the fallthru
         one and create separate abnormal edge to original destination.
         This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
      e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR)
        {
          /* We can't redirect the entry block.  Create an empty block
             at the start of the function which we use to add the new
             jump.  */
          edge tmp;
          edge_iterator ei;
          bool found = false;

          basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
                                               ENTRY_BLOCK_PTR);

          /* Change the existing edge's source to be the new block, and add
             a new edge from the entry block to the new block.  */
          e->src = bb;
          for (ei = ei_start (ENTRY_BLOCK_PTR->succs);
               (tmp = ei_safe_edge (ei)); )
            {
              if (tmp == e)
                {
                  VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs,
                                        ei.index);
                  found = true;
                  break;
                }
              else
                ei_next (&ei);
            }

          gcc_assert (found);

          VEC_safe_push (edge, gc, bb->succs, e);
          make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
        }
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
    {
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
         by searching forward from there.  Otherwise start searching
         forward from the last instruction of the old block.  */
      if (!tablejump_p (BB_END (e->src), NULL, &note))
        note = BB_END (e->src);
      note = NEXT_INSN (note);

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = e->count;
      jump_block->frequency = EDGE_FREQUENCY (e);
      jump_block->loop_depth = target->loop_depth;

      if (target->il.rtl->global_live_at_start)
        {
          jump_block->il.rtl->global_live_at_start
            = ALLOC_REG_SET (&reg_obstack);
          jump_block->il.rtl->global_live_at_end
            = ALLOC_REG_SET (&reg_obstack);
          COPY_REG_SET (jump_block->il.rtl->global_live_at_start,
                        target->il.rtl->global_live_at_start);
          COPY_REG_SET (jump_block->il.rtl->global_live_at_end,
                        target->il.rtl->global_live_at_start);
        }

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);
      if (flag_reorder_blocks_and_partition
          && targetm.have_named_sections
          && JUMP_P (BB_END (jump_block))
          && !any_condjump_p (BB_END (jump_block))
          && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
        REG_NOTES (BB_END (jump_block))
          = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP, NULL_RTX,
                               REG_NOTES (BB_END (jump_block)));

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = e->probability;
      new_edge->count = e->count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR)
    {
#ifdef HAVE_return
      emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
#else
      gcc_unreachable ();
#endif
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  emit_barrier_after (BB_END (jump_block));
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  return new_bb;
}

/* Edge E is assumed to be a fallthru edge.  Emit the needed jump instruction
   (and possibly create a new basic block) to make the edge non-fallthru.
   Return the newly created BB or NULL if none.  */

basic_block
force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest);
}

/* Redirect edge even at the expense of creating new jump insn or
   basic block.  Return new basic block if created, NULL otherwise.
   Conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  e->src->flags |= BB_DIRTY;
  return force_nonfallthru_and_redirect (e, target);
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
          || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
        q = PREV_INSN (q);
#endif

      q = PREV_INSN (q);

      /* We don't want a block to end on a line-number note since that has
         the potential of changing the code between -g and not -g.  */
      while (NOTE_P (q) && NOTE_LINE_NUMBER (q) >= 0)
        q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));

  e->flags |= EDGE_FALLTHRU;
}

/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
                      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}

/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (e)
        force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR)
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* If this is a fall through edge to the exit block, the blocks might
     not be adjacent, and the right place is after the source.  */
  if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
      /* ??? Why not edge_in->dest->prev_bb here?  */
      BB_COPY_PARTITION (bb, edge_in->dest);
    }

  /* ??? This info is likely going to be out of date very soon.  */
  if (edge_in->dest->il.rtl->global_live_at_start)
    {
      bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (bb->il.rtl->global_live_at_start,
                    edge_in->dest->il.rtl->global_live_at_start);
      COPY_REG_SET (bb->il.rtl->global_live_at_end,
                    edge_in->dest->il.rtl->global_live_at_start);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    redirect_edge_succ (edge_in, bb);

  return bb;
}

/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (rtx pattern, edge e)
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));

  if (e->insns.r == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns.r);

  emit_insn (pattern);

  e->insns.r = get_insns ();
  end_sequence ();
}

/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (edge e, int watch_calls)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb = NULL;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL_RTX;

  /* Special case -- avoid inserting code between call and storing
     its return value.  */
  if (watch_calls && (e->flags & EDGE_FALLTHRU)
      && single_pred_p (e->dest)
      && e->src != ENTRY_BLOCK_PTR
      && CALL_P (BB_END (e->src)))
    {
      rtx next = next_nonnote_insn (BB_END (e->src));

      after = BB_HEAD (e->dest);
      /* The first insn after the call may be a stack pop, skip it.  */
      while (next
             && keep_with_call_p (next))
        {
          after = next;
          next = next_nonnote_insn (next);
        }
      bb = e->dest;
    }
  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
         one predecessor, insert there.  Except for the exit block.  */
      if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
        {
          bb = e->dest;

          /* Get the location correct wrt a code label, and "nice" wrt
             a basic block note, and before everything else.  */
          tmp = BB_HEAD (bb);
          if (LABEL_P (tmp))
            tmp = NEXT_INSN (tmp);
          if (NOTE_INSN_BASIC_BLOCK_P (tmp))
            tmp = NEXT_INSN (tmp);
          if (tmp == BB_HEAD (bb))
            before = tmp;
          else if (tmp)
            after = PREV_INSN (tmp);
          else
            after = get_last_insn ();
        }

      /* If the source has one successor and the edge is not abnormal,
         insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
               && single_succ_p (e->src)
               && e->src != ENTRY_BLOCK_PTR)
        {
          bb = e->src;

          /* It is possible to have a non-simple jump here.  Consider a target
             where some forms of unconditional jumps clobber a register.  This
             happens on the fr30 for example.

             We know this block has a single successor, so we can just emit
             the queued insns before the jump.  */
          if (JUMP_P (BB_END (bb)))
            before = BB_END (bb);
          else
            {
              /* We'd better be fallthru, or we've lost track of
                 what's what.  */
              gcc_assert (e->flags & EDGE_FALLTHRU);

              after = BB_END (bb);
            }
        }
      /* Otherwise we must split the edge.  */
      else
        {
          bb = split_edge (e);
          after = BB_END (bb);

          if (flag_reorder_blocks_and_partition
              && targetm.have_named_sections
              && e->src != ENTRY_BLOCK_PTR
              && BB_PARTITION (e->src) == BB_COLD_PARTITION
              && !(e->flags & EDGE_CROSSING))
            {
              rtx bb_note, cur_insn;

              bb_note = NULL_RTX;
              for (cur_insn = BB_HEAD (bb);
                   cur_insn != NEXT_INSN (BB_END (bb));
                   cur_insn = NEXT_INSN (cur_insn))
                if (NOTE_P (cur_insn)
                    && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_BASIC_BLOCK)
                  {
                    bb_note = cur_insn;
                    break;
                  }

              if (JUMP_P (BB_END (bb))
                  && !any_condjump_p (BB_END (bb))
                  && (single_succ_edge (bb)->flags & EDGE_CROSSING))
                REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
                  (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
            }
        }
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before_noloc (insns, before);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
         This is not currently a problem because this only happens
         for the (single) epilogue, which already has a fallthru edge
         to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR
                  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
        delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));

  /* Mark the basic block for find_many_sub_basic_blocks.  */
  bb->aux = &bb->aux;
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions (void)
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->insns.r)
          {
            changed = true;
            commit_one_edge_insertion (e, false);
          }
    }

  if (!changed)
    return;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
        SET_BIT (blocks, bb->index);
        /* Check for forgotten bb->aux values before commit_edge_insertions
           call.  */
        gcc_assert (bb->aux == &bb->aux);
        bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}
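
/* A minimal usage sketch (hypothetical, kept out of the build): queue a
   clobber of REG on every ordinary edge leaving BB, then materialize all
   queued code.  Splitting of critical edges and discovery of new basic
   blocks is handled by commit_edge_insertions itself.  */
#if 0
static void
clobber_reg_on_succs_sketch (basic_block bb, rtx reg)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      insert_insn_on_edge (gen_rtx_CLOBBER (VOIDmode, reg), e);

  commit_edge_insertions ();
}
#endif
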
/* Update the CFG for all queued instructions, taking special care of
   inserting code on edges between a call and the insns storing its
   return value.  */

void
commit_edge_insertions_watch_calls (void)
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->insns.r)
          {
            changed = true;
            commit_one_edge_insertion (e, true);
          }
    }

  if (!changed)
    return;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
        SET_BIT (blocks, bb->index);
        /* Check for forgotten bb->aux values before commit_edge_insertions
           call.  */
        gcc_assert (bb->aux == &bb->aux);
        bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}

/* Print out RTL-specific basic block information (live information
   at start and end).  */

static void
rtl_dump_bb (basic_block bb, FILE *outf, int indent)
{
  rtx insn;
  rtx last;
  char *s_indent;

  s_indent = alloca ((size_t) indent + 1);
  memset (s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  fprintf (outf, ";;%s Registers live at start: ", s_indent);
  dump_regset (bb->il.rtl->global_live_at_start, outf);
  putc ('\n', outf);

  for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
       insn = NEXT_INSN (insn))
    print_rtl_single (outf, insn);

  fprintf (outf, ";;%s Registers live at end: ", s_indent);
  dump_regset (bb->il.rtl->global_live_at_end, outf);
  putc ('\n', outf);
}
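
/* Debugging sketch (hypothetical): rtl_dump_bb above is normally reached
   through the dump_bb hook; from a debugger session one can also do

     print_rtl_with_bb (stderr, get_insns ());

   to dump the whole function with basic block annotations.  */
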
/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (FILE *outf, rtx rtx_first)
{
  rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);

      basic_block bb;

      FOR_EACH_BB_REVERSE (bb)
        {
          rtx x;

          start[INSN_UID (BB_HEAD (bb))] = bb;
          end[INSN_UID (BB_END (bb))] = bb;
          for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
            {
              enum bb_state state = IN_MULTIPLE_BB;

              if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
                state = IN_ONE_BB;
              in_bb_p[INSN_UID (x)] = state;

              if (x == BB_END (bb))
                break;
            }
        }

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
        {
          int did_output;

          if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
            {
              fprintf (outf, ";; Start of basic block %d, registers live:",
                       bb->index);
              dump_regset (bb->il.rtl->global_live_at_start, outf);
              putc ('\n', outf);
            }

          if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
              && !NOTE_P (tmp_rtx)
              && !BARRIER_P (tmp_rtx))
            fprintf (outf, ";; Insn is not within a basic block\n");
          else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
            fprintf (outf, ";; Insn is in multiple basic blocks\n");

          did_output = print_rtl_single (outf, tmp_rtx);

          if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
            {
              fprintf (outf, ";; End of basic block %d, registers live:\n",
                       bb->index);
              dump_regset (bb->il.rtl->global_live_at_end, outf);
              putc ('\n', outf);
            }

          if (did_output)
            putc ('\n', outf);
        }

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
           tmp_rtx = XEXP (tmp_rtx, 1))
        print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}

/* Update the REG_BR_PROB note attached to the jump insn ending BB, if it
   no longer matches BRANCH_EDGE (bb)->probability.  */

void
update_br_prob_note (basic_block bb)
{
  rtx note;
  if (!JUMP_P (BB_END (bb)))
    return;
  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
    return;
  XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
}

/* Get the last insn associated with block BB (that includes barriers and
   tablejumps after BB).  */

rtx
get_last_bb_insn (basic_block bb)
{
  rtx tmp;
  rtx end = BB_END (bb);

  /* Include any jump table following the basic block.  */
  if (tablejump_p (end, NULL, &tmp))
    end = tmp;

  /* Include any barriers that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  while (tmp && BARRIER_P (tmp))
    {
      end = tmp;
      tmp = next_nonnote_insn (end);
    }

  return end;
}

/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries

   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  const int max_uid = get_max_uid ();
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  rtx x;
  int err = 0;
  basic_block bb;

  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      /* Verify the end of the basic block is in the INSN chain.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
        if (x == end)
          break;

      if (!(bb->flags & BB_RTL))
        {
          error ("BB_RTL flag not set for block %d", bb->index);
          err = 1;
        }

      if (!x)
        {
          error ("end insn %d for block %d not found in the insn stream",
                 INSN_UID (end), bb->index);
          err = 1;
        }

      /* Work backwards from the end to the head of the basic block
         to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
        {
          /* While walking over the insn chain, verify insns appear
             in only one basic block and initialize the BB_INFO array
             used by other passes.  */
          if (bb_info[INSN_UID (x)] != NULL)
            {
              error ("insn %d is in multiple basic blocks (%d and %d)",
                     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
              err = 1;
            }

          bb_info[INSN_UID (x)] = bb;

          if (x == head)
            break;
        }
      if (!x)
        {
          error ("head insn %d for block %d not found in the insn stream",
                 INSN_UID (head), bb->index);
          err = 1;
        }

      last_head = x;
    }

  /* Now check the basic blocks (boundaries etc.).  */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e, fallthru = NULL;
      rtx note;
      edge_iterator ei;

      if (JUMP_P (BB_END (bb))
          && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
          && EDGE_COUNT (bb->succs) >= 2
          && any_condjump_p (BB_END (bb)))
        {
          if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
              && profile_status != PROFILE_ABSENT)
            {
              error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
                     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
              err = 1;
            }
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (e->flags & EDGE_FALLTHRU)
            {
              n_fallthru++, fallthru = e;
              if ((e->flags & EDGE_CROSSING)
                  || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
                      && e->src != ENTRY_BLOCK_PTR
                      && e->dest != EXIT_BLOCK_PTR))
                {
                  error ("fallthru edge crosses section boundary (bb %i)",
                         e->src->index);
                  err = 1;
                }
            }

          if ((e->flags & ~(EDGE_DFS_BACK
                            | EDGE_CAN_FALLTHRU
                            | EDGE_IRREDUCIBLE_LOOP
                            | EDGE_LOOP_EXIT
                            | EDGE_CROSSING)) == 0)
            n_branch++;

          if (e->flags & EDGE_ABNORMAL_CALL)
            n_call++;

          if (e->flags & EDGE_EH)
            n_eh++;
          else if (e->flags & EDGE_ABNORMAL)
            n_abnormal++;
        }

      if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
          && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
        {
          error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
          err = 1;
        }
      if (n_branch
          && (!JUMP_P (BB_END (bb))
              || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
                                   || any_condjump_p (BB_END (bb))))))
        {
          error ("too many outgoing branch edges from bb %i", bb->index);
          err = 1;
        }
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
        {
          error ("fallthru edge after unconditional jump %i", bb->index);
          err = 1;
        }
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
        {
          error ("wrong amount of branch edges after unconditional jump %i",
                 bb->index);
          err = 1;
        }
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
          && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
        {
          error ("wrong amount of branch edges after conditional jump %i",
                 bb->index);
          err = 1;
        }
      if (n_call && !CALL_P (BB_END (bb)))
        {
          error ("call edges for non-call insn in bb %i", bb->index);
          err = 1;
        }
      if (n_abnormal
          && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
          && (!JUMP_P (BB_END (bb))
              || any_condjump_p (BB_END (bb))
              || any_uncondjump_p (BB_END (bb))))
        {
          error ("abnormal edges for no purpose in bb %i", bb->index);
          err = 1;
        }

      for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
        /* We may have a barrier inside a basic block before dead code
           elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
        if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
          {
            debug_rtx (x);
            if (! BLOCK_FOR_INSN (x))
              error
                ("insn %d inside basic block %d but block_for_insn is NULL",
                 INSN_UID (x), bb->index);
            else
              error
                ("insn %d inside basic block %d but block_for_insn is %i",
                 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

            err = 1;
          }

      /* OK pointers are correct.  Now check the header of basic
         block.  It ought to contain optional CODE_LABEL followed
         by NOTE_BASIC_BLOCK.  */
	 */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  free (bb_info);
  return err;
}

/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does the following checks:
   - all checks of rtl_verify_flow_info_1
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that each fallthru edge points to the adjacent block.  */

static int
rtl_verify_flow_info (void)
{
  basic_block bb;
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;

  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      edge_iterator ei;

      if (bb->predictions)
	{
	  error ("bb prediction set for block %i, but it is not used in RTL land",
		 bb->index);
	  err = 1;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (!e)
	{
	  rtx insn;

	  /* Ensure the existence of a barrier in a BB with no fallthru edges.
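	     Such a block typically ends in an unconditional jump or a
	     return, e.g.

	       (jump_insn ... (return))
	       (barrier)

	     and the barrier must still be present, or passes walking the
	     insn stream would run past the end of the block.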
	   */
	  for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
	       insn = NEXT_INSN (insn))
	    if (!insn
		|| (NOTE_P (insn)
		    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
	      {
		error ("missing barrier after block %i", bb->index);
		err = 1;
		break;
	      }
	}
      else if (e->src != ENTRY_BLOCK_PTR
	       && e->dest != EXIT_BLOCK_PTR)
	{
	  rtx insn;

	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_P (NEXT_INSN (x))
		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (NEXT_INSN (x) && BARRIER_P (NEXT_INSN (x))))
	fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

  return err;
}

/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the corresponding edges
   from the CFG.  Return true if any edges were eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Clean up abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.
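	 The tests below therefore keep an edge when, roughly:

	   EDGE_EH:                 BB_END (bb) can still throw internally
				    (and, if the edge is also an abnormal
				    call edge, it is still a call insn);
	   EDGE_ABNORMAL_CALL only: BB_END (bb) is still a call insn whose
				    REG_EH_REGION note, if any, is
				    non-negative;
	   any other edge:          kept unconditionally.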
	 */
      if (e->flags & EDGE_EH)
	{
	  if (can_throw_internal (BB_END (bb))
	      /* If this is a call edge, verify that this is a call insn.  */
	      && (! (e->flags & EDGE_ABNORMAL_CALL)
		  || CALL_P (BB_END (bb))))
	    {
	      ei_next (&ei);
	      continue;
	    }
	}
      else if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (CALL_P (BB_END (bb))
	      && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
		  || INTVAL (XEXP (note, 0)) >= 0))
	    {
	      ei_next (&ei);
	      continue;
	    }
	}
      else
	{
	  ei_next (&ei);
	  continue;
	}

      remove_edge (e);
      bb->flags |= BB_DIRTY;
      purged = true;
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b, f;
      edge_iterator ei;

      /* We care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned a condjump into a simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Don't let abnormal flags leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  bb->flags |= BB_DIRTY;
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.
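	 A worked example, with REG_BR_PROB_BASE == 10000: if the surviving
	 conditional jump carries a REG_BR_PROB note of 9000 and bb->count
	 is 200, the code below produces

	   b->probability = 9000;  b->count = 200 * 9000 / 10000 = 180;
	   f->probability = 1000;  f->count = 200 * 1000 / 10000 = 20.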
	 */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = INTVAL (XEXP (note, 0));
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return false;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  bb->flags |= BB_DIRTY;
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}

/* Search all basic blocks for potentially dead edges and purge them.  Return
   true if some edge was eliminated.  */

bool
purge_all_dead_edges (void)
{
  bool purged = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    purged |= purge_dead_edges (bb);

  return purged;
}

/* Same as split_block but update cfg_layout structures.  */

static basic_block
cfg_layout_split_block (basic_block bb, void *insnp)
{
  rtx insn = insnp;
  basic_block new_bb = rtl_split_block (bb, insn);

  new_bb->il.rtl->footer = bb->il.rtl->footer;
  bb->il.rtl->footer = NULL;

  return new_bb;
}


/* Redirect edge E to DEST.
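   Return the redirected edge on success, or NULL when the edge cannot be
   redirected; abnormal call and EH edges are never redirected here.
   Callers normally reach this function through the
   redirect_edge_and_branch hook wrapper rather than directly.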
   */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      src->flags |= BB_DIRTY;
      return ret;
    }

  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      e->src->flags |= BB_DIRTY;
      redirect_edge_succ (e, dest);
      return e;
    }

  /* redirect_edge_and_branch may decide to turn the branch into a fallthru
     edge when the basic blocks appear to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  e->flags |= EDGE_FALLTHRU;
	  e->src->flags |= BB_DIRTY;
	  return e;
	}
      /* If we are redirecting the fallthru edge to the branch edge
	 of a conditional jump, remove the jump.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      ret = redirect_edge_succ_nodup (e, dest);
      if (dump_file)
	fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
		 e->src->index, e->dest->index, dest->index);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  src->flags |= BB_DIRTY;
  return ret;
}

/* Simple wrapper, as we can always redirect fallthru edges in
   cfglayout mode.  */
static basic_block
cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);

  gcc_assert (redirected);
  return NULL;
}

/* Same as delete_basic_block but update cfg_layout structures.
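   In cfglayout mode a block may own detached "header" and "footer" insn
   chains (notes before the block; barriers and jump tables after it).
   Deleting BB therefore first splices those chains back into the global
   insn stream, conceptually rewriting

     prev -> BB_HEAD .. BB_END -> next

   as

     prev -> header -> BB_HEAD .. BB_END -> footer -> next

   and whatever survives the deletion of the block's own insns is handed
   over to the header of the next block (or to the function footer).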
   */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaining;

  if (bb->il.rtl->header)
    {
      next = BB_HEAD (bb);
      if (prev)
	NEXT_INSN (prev) = bb->il.rtl->header;
      else
	set_first_insn (bb->il.rtl->header);
      PREV_INSN (bb->il.rtl->header) = prev;
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (bb->il.rtl->footer)
    {
      insn = bb->il.rtl->footer;
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		bb->il.rtl->footer = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      if (bb->il.rtl->footer)
	{
	  insn = BB_END (bb);
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  NEXT_INSN (insn) = next;
	  if (next)
	    PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &bb->next_bb->il.rtl->header;
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      remaining = unlink_insn_chain (prev, next);
      insn = remaining;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
	PREV_INSN (*to) = insn;
      *to = remaining;
    }
}

/* Return true when blocks A and B can be safely merged.  */
static bool
cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be a simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Merge blocks A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
#ifdef ENABLE_CHECKING
  gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
#endif

  /* If there was a CODE_LABEL beginning B, delete it.
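     (At this point A is B's only predecessor, so after the merge the
     label could no longer be reached; any EH bookkeeping referring to
     it is fixed up first.)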
     */
  if (LABEL_P (BB_HEAD (b)))
    {
      /* This might have been an EH label that no longer has incoming
	 EH edges.  Update data structures to match.  */
      maybe_remove_eh_handler (BB_HEAD (b));

      delete_insn (BB_HEAD (b));
    }

  /* We should have a fallthru edge in A; if not, we can do a dummy
     redirection to get it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* Possible line number notes should appear in between.  */
  if (b->il.rtl->header)
    {
      rtx first = BB_END (a), last;

      last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a));
      delete_insn_chain (NEXT_INSN (first), last);
      b->il.rtl->header = NULL;
    }

  /* If the basic blocks are not adjacent in the insn stream, move
     B's insns up next to A's.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (first, BB_END (a));
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (first))
	first = NEXT_INSN (first);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
      BB_HEAD (b) = NULL;
      delete_insn (first);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      rtx insn;

      for (insn = BB_HEAD (b);
	   insn != NEXT_INSN (BB_END (b));
	   insn = NEXT_INSN (insn))
	set_block_for_insn (insn, a);
      insn = BB_HEAD (b);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
      BB_HEAD (b) = NULL;
      BB_END (a) = BB_END (b);
      delete_insn (insn);
    }

  /* Possible tablejumps and barriers should appear after the block.  */
  if (b->il.rtl->footer)
    {
      if (!a->il.rtl->footer)
	a->il.rtl->footer = b->il.rtl->footer;
      else
	{
	  rtx last = a->il.rtl->footer;

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = b->il.rtl->footer;
	  PREV_INSN (b->il.rtl->footer) = last;
	}
      b->il.rtl->footer = NULL;
    }
  a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n",
	     a->index, b->index);
}

/* Split edge E.  */

static basic_block
cfg_layout_split_edge (edge e)
{
  basic_block new_bb =
    create_basic_block (e->src != ENTRY_BLOCK_PTR
			? NEXT_INSN (BB_END (e->src)) : get_insns (),
			NULL_RTX, e->src);

  /* ??? This info is likely going to be out of date very soon, but we must
     create it to avoid getting an ICE later.  */
  if (e->dest->il.rtl->global_live_at_start)
    {
      new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
		    e->dest->il.rtl->global_live_at_start);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
		    e->dest->il.rtl->global_live_at_start);
    }

  make_edge (new_bb, e->dest, EDGE_FALLTHRU);
  redirect_edge_and_branch_force (e, new_bb);

  return new_bb;
}

/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.
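   Nothing needs to be done here in RTL land, but the hook must exist to
   satisfy the cfghooks interface.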
   */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}

/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call, false otherwise.  */

static bool
rtl_block_ends_with_call_p (basic_block bb)
{
  rtx insn = BB_END (bb);

  while (!CALL_P (insn)
	 && insn != BB_HEAD (bb)
	 && keep_with_call_p (insn))
    insn = PREV_INSN (insn);
  return (CALL_P (insn));
}

/* Return true if BB ends with a conditional branch, false otherwise.  */

static bool
rtl_block_ends_with_condjump_p (basic_block bb)
{
  return any_condjump_p (BB_END (bb));
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for rtl_flow_call_edges_add.  */

static bool
need_fake_edge_p (rtx insn)
{
  if (!INSN_P (insn))
    return false;

  if (CALL_P (insn)
      && !SIBLING_CALL_P (insn)
      && !find_reg_note (insn, REG_NORETURN, NULL)
      && !CONST_OR_PURE_CALL_P (insn))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	   && MEM_VOLATILE_P (PATTERN (insn)))
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && asm_noperands (insn) != -1
	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and non-noreturn
   calls, and for volatile inline assembly, in the bitmap of blocks specified
   by BLOCKS, or in the whole CFG if BLOCKS is zero.  Return the number of
   blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.
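	 (keep_with_call_p holds, for example, for the insn copying the
	 function's return value out of the hard return register.)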
	 */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non-constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in the CFG already.  Calling make_edge in such a case
		 would cause us to mark that edge as fake and remove it
		 later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR);
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}

/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target, and SECOND_HEAD is the fall-thru
   target; there is no need to handle the latter here, as the loop
   versioning code takes care of it.  SECOND_HEAD is a parameter only
   because the tree-level counterpart of this hook needs it, and the
   two must share a type since this is a hook.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx label, seq, jump;
  rtx op0 = XEXP ((rtx) comp_rtx, 0);
  rtx op1 = XEXP ((rtx) comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx) comp_rtx);
  enum machine_mode mode;

  label = block_label (first_head);
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
			   mode, NULL_RTX, NULL_RTX, label);
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new condition at the end of the new head block.
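     A hypothetical caller would reach this through the cfghooks wrapper;
     e.g., with placeholder blocks and rtx operands a and b,

       lv_add_condition_to_bb (true_bb, false_bb, cond_bb,
			       gen_rtx_GT (VOIDmode, a, b));

     leaves COND_BB ending in a conditional jump to the label of TRUE_BB
     taken when a > b; the fall-thru to FALSE_BB is arranged by the
     caller.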
     */
  emit_insn_after (seq, BB_END (cond_bb));
}


/* Given a block B with a conditional branch at its end, store the branch
   edge and the fall-thru edge in BRANCH_EDGE and FALLTHRU_EDGE
   respectively.  */
static void
rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
			   edge *fallthru_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_FALLTHRU)
    {
      *fallthru_edge = e;
      *branch_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *branch_edge = e;
      *fallthru_edge = EDGE_SUCC (b, 1);
    }
}

void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.rtl);
  bb->il.rtl = ggc_alloc_cleared (sizeof (struct rtl_bb_info));
}


/* Add PAT to the end of basic block BB.  */

rtx
insert_insn_end_bb_new (rtx pat, basic_block bb)
{
  rtx insn = BB_END (bb);
  rtx new_insn;
  rtx pat_end = pat;

  while (NEXT_INSN (pat_end) != NULL_RTX)
    pat_end = NEXT_INSN (pat_end);

  /* If the last insn is a jump, insert PAT in front [taking care to
     handle cc0, etc. properly].  Similarly we need to care about trapping
     instructions in the presence of non-call exceptions.  */

  if (JUMP_P (insn)
      || (NONJUMP_INSN_P (insn)
	  && (!single_succ_p (bb)
	      || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
    {
#ifdef HAVE_cc0
      rtx note;
#endif
      /* If this is a jump table, then we can't insert stuff here.  Since
	 we know the previous real insn must be the tablejump, we insert
	 the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
	 if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
	insn = XEXP (note, 0);
      else
	{
	  rtx maybe_cc0_setter = prev_nonnote_insn (insn);
	  if (maybe_cc0_setter
	      && INSN_P (maybe_cc0_setter)
	      && sets_cc0_p (PATTERN (maybe_cc0_setter)))
	    insn = maybe_cc0_setter;
	}
#endif
      /* FIXME: What if something in cc0/jump uses the value set in the
	 new insn?  */
      new_insn = emit_insn_before_noloc (pat, insn);
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (CALL_P (insn)
	   && (!single_succ_p (bb)
	       || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    {
      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
	 we search backward and place the instructions before the first
	 parameter is loaded.  Do this for everyone for consistency and a
	 presumption that we'll get better code elsewhere as well.  */

      /* Since different machines initialize their parameter registers
	 in different orders, assume nothing.  Collect the set of all
	 parameter registers.  */
      insn = find_first_parameter_load (insn, BB_HEAD (bb));

      /* If we found all the parameter loads, then we want to insert
	 before the first parameter load.

	 If we did not find all the parameter loads, then we might have
	 stopped on the head of the block, which could be a CODE_LABEL.
	 If we inserted before the CODE_LABEL, then we would be putting
	 the insn in the wrong basic block.  In that case, put the insn
	 after the CODE_LABEL.
	 Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (LABEL_P (insn)
	     || NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);

      new_insn = emit_insn_before_noloc (pat, insn);
    }
  else
    new_insn = emit_insn_after_noloc (pat, insn);

  return new_insn;
}

/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  NULL, /* can_duplicate_block_p */
  NULL, /* duplicate_block */
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  NULL, /* extract_cond_bb_edges */
  NULL  /* flush_pending_stmts */
};

/* Implementation of CFG manipulation for cfg layout RTL, where
   basic blocks connected via fallthru edges do not have to be adjacent.
   This representation will hopefully become the default one in a future
   version of the compiler.  */

/* We do not want to declare these functions in a header file, since they
   should only be used through the cfghooks interface, and we do not want to
   move them here since it would require also moving quite a lot of related
   code.  */
extern bool cfg_layout_can_duplicate_bb_p (basic_block);
extern basic_block cfg_layout_duplicate_bb (basic_block);

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL /* flush_pending_stmts */
};
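
/* These hook tables are not used directly.  A pass installs one of them
   with rtl_register_cfg_hooks () or cfg_layout_rtl_register_cfg_hooks ()
   (see cfghooks.c) and then goes through the generic wrappers declared
   in cfghooks.h, such as redirect_edge_and_branch (e, dest) or
   split_block (bb, insn).  */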