/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "common/common-target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"
#include "df.h"
#include "vecprim.h"
#include "emit-rtl.h"

/* Holds the interesting trailing notes for the function.
 */
rtx cfg_layout_function_footer;
rtx cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);

/* Detach the insns FIRST through LAST (inclusive) from the function's
   insn chain, reconnecting their former neighbours and keeping the
   function's first/last insn pointers up to date.  Returns FIRST,
   which now heads a NULL-terminated, free-standing sub-chain.  */

rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  /* Terminate the detached sub-chain at both ends ...  */
  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  /* ... and splice its former neighbours together.  */
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers). If there are any such insns,
   we return the last one. Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  /* Never walk into the following basic block's insns.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          /* A barrier after BB belongs to BB.  */
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_KIND (insn))
            {
            case NOTE_INSN_BLOCK_END:
              gcc_unreachable ();
              continue;
            default:
              /* Skip over any other note without claiming it.  */
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          /* A label immediately followed by its jump table data belongs
             to the tablejump at the end of BB; keep both with BB.  */
          if (NEXT_INSN (insn)
              && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
        switch (NOTE_KIND (insn))
          {
          case NOTE_INSN_BLOCK_END:
            gcc_unreachable ();
            break;
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
          case NOTE_INSN_DELETED_DEBUG_LABEL:
            continue;
          default:
            /* Move the note after the claimed trailing insns.  */
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (!LABEL_P (label))
    {
      if (dump_file)
        fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Skip any leading notes before the first NOTE_INSN_BASIC_BLOCK.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?
 */
  gcc_assert (insn);

  /* Everything before the first basic-block note becomes the function
     header chain.  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
            unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's claimed end and BB_HEAD
         become BB's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
        bb->il.rtl->header = unlink_insn_chain (next_insn,
                                                PREV_INSN (BB_HEAD (bb)));
      /* Trailing insns (barriers, jump tables) become BB's footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
        bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Anything left after the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}

/* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
   numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improve the memory locality in binary search.
   The _locs array contains locators where the given property change.  The
   block_locators_blocks contains the scope block that is used for all insn
   locator greater than corresponding block_locators_locs value and smaller
   than the following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *locations_locators_locs;
DEF_VEC_O(location_t);
DEF_VEC_ALLOC_O(location_t,heap);
static VEC(location_t,heap) *locations_locators_vals;
int prologue_locator;
int epilogue_locator;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
static location_t curr_location, last_location;
static tree curr_block, last_block;
/* -1 means the locator machinery is not (or no longer) initialized.  */
static int curr_rtl_loc = -1;

/* Allocate insn locator datastructure.  */
void
insn_locators_alloc (void)
{
  prologue_locator = epilogue_locator = 0;

  block_locators_locs = VEC_alloc (int, heap, 32);
  block_locators_blocks = VEC_alloc (tree, gc, 32);
  locations_locators_locs = VEC_alloc (int, heap, 32);
  locations_locators_vals = VEC_alloc (location_t, heap, 32);

  curr_location = UNKNOWN_LOCATION;
  last_location = UNKNOWN_LOCATION;
  curr_block = NULL;
  last_block = NULL;
  curr_rtl_loc = 0;
}

/* At the end of emit stage, clear current location.  */
void
insn_locators_finalize (void)
{
  if (curr_rtl_loc >= 0)
    epilogue_locator = curr_insn_locator ();
  curr_rtl_loc = -1;
}

/* Free the insn locator datastructures.  */
void
insn_locators_free (void)
{
  prologue_locator = epilogue_locator = 0;

  VEC_free (int, heap, block_locators_locs);
  VEC_free (tree,gc, block_locators_blocks);
  VEC_free (int, heap, locations_locators_locs);
  VEC_free (location_t, heap, locations_locators_vals);
}

/* Set current location.  */
void
set_curr_insn_source_location (location_t location)
{
  /* IV opts calls into RTL expansion to compute costs of operations.  At this
     time locators are not initialized.  */
  if (curr_rtl_loc == -1)
    return;
  curr_location = location;
}

/* Get current location.  */
location_t
get_curr_insn_source_location (void)
{
  return curr_location;
}

/* Set current scope block.  */
void
set_curr_insn_block (tree b)
{
  /* IV opts calls into RTL expansion to compute costs of operations.  At this
     time locators are not initialized.  */
  if (curr_rtl_loc == -1)
    return;
  if (b)
    curr_block = b;
}

/* Get current scope block.
 */
tree
get_curr_insn_block (void)
{
  return curr_block;
}

/* Return current insn locator.  A new locator is allocated lazily only
   when the current block or location differs from the last recorded
   one.  Returns 0 when locators are uninitialized or the location is
   unknown.  */
int
curr_insn_locator (void)
{
  if (curr_rtl_loc == -1 || curr_location == UNKNOWN_LOCATION)
    return 0;
  if (last_block != curr_block)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
      VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
      last_block = curr_block;
    }
  if (last_location != curr_location)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
      VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
      last_location = curr_location;
    }
  return curr_rtl_loc;
}

/* Pass execute function: switch the function into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}

/* Pass execute function: leave cfglayout mode, keeping the current
   block order (bb->aux chained in source order).  */
static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}

struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  PROP_cfglayout,                       /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfglayout,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Return scope resulting from combination of S1 and S2.  */
static tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  /* Picks the block with the larger BLOCK_NUMBER -- presumably the more
     deeply nested scope; TODO(review): confirm numbering invariant.  */
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Walk both scopes up through their supercontexts until they meet at
     the common ancestor COM.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Return lexical scope block locator belongs to.  */
static tree
locator_scope (int loc)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.
 */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  /* Binary search for the last entry whose locator is <= LOC; its slot
     in block_locators_blocks names the scope in effect at LOC.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VEC_index (tree, block_locators_blocks, min);
}

/* Return lexical scope block insn belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return locator_scope (INSN_LOCATOR (insn));
}

/* Return the source location of the statement specified by the locator.  */
location_t
locator_location (int loc)
{
  int max = VEC_length (int, locations_locators_locs);
  int min = 0;

  /* Same binary search as in locator_scope, over the location tables.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, locations_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return *VEC_index (location_t, locations_locators_vals, min);
}

/* Return source line of the statement specified by the locator,
   or 0 for locator 0.  */
int
locator_line (int loc)
{
  expanded_location xloc;
  if (!loc)
    return 0;
  else
    xloc = expand_location (locator_location (loc));
  return xloc.line;
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return source file of the statement specified by LOC,
   or NULL for locator 0.  */
const char *
locator_file (int loc)
{
  expanded_location xloc;
  if (!loc)
    return 0;
  else
    xloc = expand_location (locator_location (loc));
  return xloc.file;
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}

/* Return true if LOC1 and LOC2 locators have the same location and scope.  */
bool
locator_eq (int loc1, int loc2)
{
  if (loc1 == loc2)
    return true;
  if (locator_location (loc1) != locator_location (loc2))
    return false;
  return locator_scope (loc1) == locator_scope (loc2);
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
        continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      /* Emit BLOCK_END/BLOCK_BEG notes wherever the scope changes.  */
      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}


/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  This also clears the visited flag on
   all basic blocks.
   If STAY_IN_CFGLAYOUT_MODE is false, this function
   also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
           bb;
           bb = (basic_block) bb->aux, index++)
        {
          fprintf (dump_file, " %i ", index);
          if (get_bb_original (bb))
            fprintf (dump_file, "duplicate of %i ",
                     get_bb_original (bb)->index);
          else if (forwarder_block_p (bb)
                   && !LABEL_P (BB_HEAD (bb)))
            fprintf (dump_file, "compensation ");
          else
            fprintf (dump_file, "bb %i ", bb->index);
          fprintf (dump_file, " [%i]\n", bb->frequency);
        }
    }

  /* Now reorder the blocks: rewrite prev_bb/next_bb from the aux
     chain.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux and visited fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
      if (!stay_in_cfglayout_mode)
        bb->il.rtl->header = bb->il.rtl->footer = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     are going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}


/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      /* Splice in BB's header chain, if any, before the block itself.  */
      if (bb->il.rtl->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->il.rtl->header;
          else
            set_first_insn (bb->il.rtl->header);
          PREV_INSN (bb->il.rtl->header) = insn;
          insn = bb->il.rtl->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      /* Then the block's own insns ...  */
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      /* ... and its footer chain.  */
      if (bb->il.rtl->footer)
        {
          NEXT_INSN (insn) = bb->il.rtl->footer;
          PREV_INSN (bb->il.rtl->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.
 */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb, src_bb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
        {
          ret_label = JUMP_LABEL (bb_end_insn);
          if (any_condjump_p (bb_end_insn))
            {
              /* This might happen if the conditional jump has side
                 effects and could therefore not be optimized away.
                 Make the basic block to end with a barrier in order
                 to prevent rtl_verify_flow_info from complaining.  */
              if (!e_fall)
                {
                  gcc_assert (!onlyjump_p (bb_end_insn)
                              || returnjump_p (bb_end_insn));
                  bb->il.rtl->footer = emit_barrier_after (bb_end_insn);
                  continue;
                }

              /* If the old fallthru is still next, nothing to do.  */
              if (bb->aux == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* The degenerated case of conditional jump jumping to the next
                 instruction can happen for jumps with side effects.  We need
                 to construct a forwarder block and this will be done just
                 fine by force_nonfallthru below.  */
              if (!e_taken)
                ;

              /* There is another special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->aux != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  /* Invert the jump when falling through is the more
                     likely outcome.  */
                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      (e_fall->dest == EXIT_BLOCK_PTR
                                       ? NULL_RTX
                                       : label_for_bb (e_fall->dest)), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
                      gcc_checking_assert (could_fall_through
                                           (e_taken->src, e_taken->dest));
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* If the "jumping" edge is a crossing edge, and the fall
                 through edge is non-crossing, leave things as they are.  */
              else if ((e_taken->flags & EDGE_CROSSING)
                       && !(e_fall->flags & EDGE_CROSSING))
                continue;

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    (e_fall->dest == EXIT_BLOCK_PTR
                                     ? NULL_RTX
                                     : label_for_bb (e_fall->dest)), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
                  gcc_checking_assert (could_fall_through
                                       (e_taken->src, e_taken->dest));
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
            {
              /* If the old fallthru is still next or if
                 asm goto doesn't have a fallthru (e.g. when followed by
                 __builtin_unreachable ()), nothing to do.  */
              if (! e_fall
                  || bb->aux == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* Otherwise we'll have to use the fallthru fixup below.  */
            }
          else
            {
              /* Otherwise we have some return, switch or computed
                 jump.  In the 99% case, there should not have been a
                 fallthru edge.  */
              gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
              continue;
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->aux == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.
         Note force_nonfallthru can delete E_FALL and thus we have to
         save E_FALL->src prior to the call to force_nonfallthru.  */
      src_bb = e_fall->src;
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
        {
          nb->il.rtl->visited = 1;
          nb->aux = bb->aux;
          bb->aux = nb;
          /* Don't process this new block.  */
          bb = nb;

          /* Make sure new bb is tagged for correct section (same as
             fall-thru source, since you cannot fall-thru across
             section boundaries).  */
          BB_COPY_PARTITION (src_bb, single_pred (bb));
          if (flag_reorder_blocks_and_partition
              && targetm_common.have_named_sections
              && JUMP_P (BB_END (bb))
              && !any_condjump_p (BB_END (bb))
              && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
            add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
        }
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
        edge e;
        edge_iterator ei;

        FOR_EACH_EDGE (e, ei, bb->succs)
          if (e->goto_locus && !(e->flags & EDGE_ABNORMAL))
            {
              edge e2;
              edge_iterator ei2;
              basic_block dest, nb;
              rtx end;

              /* Search the source block backwards for a real insn that
                 already carries E's goto_locus.  */
              insn = BB_END (e->src);
              end = PREV_INSN (BB_HEAD (e->src));
              while (insn != end
                     && (!NONDEBUG_INSN_P (insn) || INSN_LOCATOR (insn) == 0))
                insn = PREV_INSN (insn);
              if (insn != end
                  && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
                continue;
              /* A simple jump without a locator can just take the edge's
                 locus directly.  */
              if (simplejump_p (BB_END (e->src))
                  && INSN_LOCATOR (BB_END (e->src)) == 0)
                {
                  INSN_LOCATOR (BB_END (e->src)) = e->goto_locus;
                  continue;
                }
              dest = e->dest;
              if (dest == EXIT_BLOCK_PTR)
                {
                  /* Non-fallthru edges to the exit block cannot be split.  */
                  if (!(e->flags & EDGE_FALLTHRU))
                    continue;
                }
              else
                {
                  /* Likewise check the destination block's leading insns.  */
                  insn = BB_HEAD (dest);
                  end = NEXT_INSN (BB_END (dest));
                  while (insn != end && !NONDEBUG_INSN_P (insn))
                    insn = NEXT_INSN (insn);
                  if (insn != end && INSN_LOCATOR (insn)
                      && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
                    continue;
                }
              /* Otherwise materialize the locus on a new block (padding
                 it with a nop if it would otherwise hold no insn).  */
              nb = split_edge (e);
              if (!INSN_P (BB_END (nb)))
                BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
                                                     nb);
              INSN_LOCATOR (BB_END (nb)) = e->goto_locus;

              /* If there are other incoming edges to the destination block
                 with the same goto locus, redirect them to the new block as
                 well, this can prevent other such blocks from being created
                 in subsequent iterations of the loop.  */
              for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
                if (e2->goto_locus
                    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
                    && locator_eq (e->goto_locus, e2->goto_locus))
                  redirect_edge_and_branch (e2, nb);
                else
                  ei_next (&ei2);
            }
      }
}

/* Perform sanity checks on the insn chain.
   1.
 Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

DEBUG_FUNCTION void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  /* Forward walk: each insn's PREV must be the insn just visited.  */
  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  /* Backward walk: mirror check on the NEXT pointers.  */
  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}

/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e)
    bb = e->src;

  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
         edge, we have to split that block.  */
      if (c == bb)
        {
          bb = split_block (bb, NULL)->dest;
          bb->aux = c->aux;
          c->aux = bb;
          bb->il.rtl->footer = c->il.rtl->footer;
          c->il.rtl->footer = NULL;
        }

      /* Unlink BB from the aux chain ...  */
      while (c->aux != bb)
        c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ... and re-append it at the very end.  */
      while (c->aux)
        c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}

/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* See whether the exit block has two or more fallthru preds.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
        if (predecessor == NULL)
          predecessor = e;
        else
          {
            more = true;
            break;
          }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
          || !(e->flags & EDGE_FALLTHRU))
        ei_next (&ei);
      else
        redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
        {
          bb->aux = forwarder;
          break;
        }
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.
 */
extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);

bool
cfg_layout_can_duplicate_bb_p (const_basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
        {
          if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
            return false;
          if (insn == BB_END (bb))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}

/* Emit a copy of insns FROM through TO (inclusive) at the end of the
   current insn chain and return the first insn of the copy.  */

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case DEBUG_INSN:
          /* Don't duplicate label debug insns.  */
          if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
            break;
          /* FALLTHRU */
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            {
              /* Avoid copying following barrier as well if any
                 (and debug insns in between).  */
              rtx next;

              for (next = NEXT_INSN (insn);
                   next != NEXT_INSN (to);
                   next = NEXT_INSN (next))
                if (!DEBUG_INSN_P (next))
                  break;
              if (next != NEXT_INSN (to) && BARRIER_P (next))
                insn = next;
              break;
            }
          copy = emit_copy_of_insn_after (insn, get_last_insn ());
          /* Preserve (simple) return targets, which are not real labels
             and thus not handled by emit_copy_of_insn_after.  */
          if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
              && ANY_RETURN_P (JUMP_LABEL (insn)))
            JUMP_LABEL (copy) = JUMP_LABEL (insn);
          maybe_copy_prologue_epilogue_insn (insn, copy);
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_KIND (insn))
            {
              /* In case prologue is empty and function contain label
                 in first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
            case NOTE_INSN_DELETED_DEBUG_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just single entry to function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_SWITCH_TEXT_SECTIONS:
              emit_note_copy (insn);
              break;

            default:
              /* All other notes should have already been eliminated.  */
              gcc_unreachable ();
            }
          break;
        default:
          gcc_unreachable ();
        }
    }
  /* Drop the sentinel note and return the first copied insn.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  /* Also duplicate the layout-mode header chain, if BB has one.  */
  if (bb->il.rtl->header)
    {
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->header, insn);
      if (insn)
        new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
    }

  /* And likewise the footer chain.  */
  if (bb->il.rtl->footer)
    {
      insn = bb->il.rtl->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
      if (insn)
        new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}


/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx x;
  basic_block bb;

  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
    {
      bb = BLOCK_FOR_INSN (XEXP (x, 0));
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}

/* Splits superblocks.
*/ 1324 void 1325 break_superblocks (void) 1326 { 1327 sbitmap superblocks; 1328 bool need = false; 1329 basic_block bb; 1330 1331 superblocks = sbitmap_alloc (last_basic_block); 1332 sbitmap_zero (superblocks); 1333 1334 FOR_EACH_BB (bb) 1335 if (bb->flags & BB_SUPERBLOCK) 1336 { 1337 bb->flags &= ~BB_SUPERBLOCK; 1338 SET_BIT (superblocks, bb->index); 1339 need = true; 1340 } 1341 1342 if (need) 1343 { 1344 rebuild_jump_labels (get_insns ()); 1345 find_many_sub_basic_blocks (superblocks); 1346 } 1347 1348 free (superblocks); 1349 } 1350 1351 /* Finalize the changes: reorder insn list according to the sequence specified 1352 by aux pointers, enter compensation code, rebuild scope forest. */ 1353 1354 void 1355 cfg_layout_finalize (void) 1356 { 1357 #ifdef ENABLE_CHECKING 1358 verify_flow_info (); 1359 #endif 1360 force_one_exit_fallthru (); 1361 rtl_register_cfg_hooks (); 1362 if (reload_completed 1363 #ifdef HAVE_epilogue 1364 && !HAVE_epilogue 1365 #endif 1366 ) 1367 fixup_fallthru_exit_predecessor (); 1368 fixup_reorder_chain (); 1369 1370 rebuild_jump_labels (get_insns ()); 1371 delete_dead_jumptables (); 1372 1373 #ifdef ENABLE_CHECKING 1374 verify_insn_chain (); 1375 verify_flow_info (); 1376 #endif 1377 } 1378 1379 /* Checks whether all N blocks in BBS array can be copied. */ 1380 bool 1381 can_copy_bbs_p (basic_block *bbs, unsigned n) 1382 { 1383 unsigned i; 1384 edge e; 1385 int ret = true; 1386 1387 for (i = 0; i < n; i++) 1388 bbs[i]->flags |= BB_DUPLICATED; 1389 1390 for (i = 0; i < n; i++) 1391 { 1392 /* In case we should redirect abnormal edge during duplication, fail. 
*/ 1393 edge_iterator ei; 1394 FOR_EACH_EDGE (e, ei, bbs[i]->succs) 1395 if ((e->flags & EDGE_ABNORMAL) 1396 && (e->dest->flags & BB_DUPLICATED)) 1397 { 1398 ret = false; 1399 goto end; 1400 } 1401 1402 if (!can_duplicate_block_p (bbs[i])) 1403 { 1404 ret = false; 1405 break; 1406 } 1407 } 1408 1409 end: 1410 for (i = 0; i < n; i++) 1411 bbs[i]->flags &= ~BB_DUPLICATED; 1412 1413 return ret; 1414 } 1415 1416 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks 1417 are placed into array NEW_BBS in the same order. Edges from basic blocks 1418 in BBS are also duplicated and copies of those of them 1419 that lead into BBS are redirected to appropriate newly created block. The 1420 function assigns bbs into loops (copy of basic block bb is assigned to 1421 bb->loop_father->copy loop, so this must be set up correctly in advance) 1422 and updates dominators locally (LOOPS structure that contains the information 1423 about dominators is passed to enable this). 1424 1425 BASE is the superloop to that basic block belongs; if its header or latch 1426 is copied, we do not set the new blocks as header or latch. 1427 1428 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES, 1429 also in the same order. 1430 1431 Newly created basic blocks are put after the basic block AFTER in the 1432 instruction stream, and the order of the blocks in BBS array is preserved. */ 1433 1434 void 1435 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs, 1436 edge *edges, unsigned num_edges, edge *new_edges, 1437 struct loop *base, basic_block after) 1438 { 1439 unsigned i, j; 1440 basic_block bb, new_bb, dom_bb; 1441 edge e; 1442 1443 /* Duplicate bbs, update dominators, assign bbs to loops. */ 1444 for (i = 0; i < n; i++) 1445 { 1446 /* Duplicate. */ 1447 bb = bbs[i]; 1448 new_bb = new_bbs[i] = duplicate_block (bb, NULL, after); 1449 after = new_bb; 1450 bb->flags |= BB_DUPLICATED; 1451 /* Possibly set loop header. 
*/ 1452 if (bb->loop_father->header == bb && bb->loop_father != base) 1453 new_bb->loop_father->header = new_bb; 1454 /* Or latch. */ 1455 if (bb->loop_father->latch == bb && bb->loop_father != base) 1456 new_bb->loop_father->latch = new_bb; 1457 } 1458 1459 /* Set dominators. */ 1460 for (i = 0; i < n; i++) 1461 { 1462 bb = bbs[i]; 1463 new_bb = new_bbs[i]; 1464 1465 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb); 1466 if (dom_bb->flags & BB_DUPLICATED) 1467 { 1468 dom_bb = get_bb_copy (dom_bb); 1469 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb); 1470 } 1471 } 1472 1473 /* Redirect edges. */ 1474 for (j = 0; j < num_edges; j++) 1475 new_edges[j] = NULL; 1476 for (i = 0; i < n; i++) 1477 { 1478 edge_iterator ei; 1479 new_bb = new_bbs[i]; 1480 bb = bbs[i]; 1481 1482 FOR_EACH_EDGE (e, ei, new_bb->succs) 1483 { 1484 for (j = 0; j < num_edges; j++) 1485 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest) 1486 new_edges[j] = e; 1487 1488 if (!(e->dest->flags & BB_DUPLICATED)) 1489 continue; 1490 redirect_edge_and_branch_force (e, get_bb_copy (e->dest)); 1491 } 1492 } 1493 1494 /* Clear information about duplicates. */ 1495 for (i = 0; i < n; i++) 1496 bbs[i]->flags &= ~BB_DUPLICATED; 1497 } 1498 1499 #include "gt-cfglayout.h" 1500