/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
         find_basic_blocks
     - Local CFG construction
         find_sub_basic_blocks  */
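
/* Usage sketch (added comment, not from the original sources): a pass that
   has rebuilt or heavily rearranged the whole insn chain would typically
   reconstruct the CFG with something like

       find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file);

   whereas a pass that only emitted new control flow inside existing blocks
   would call find_sub_basic_blocks or find_many_sub_basic_blocks on the
   affected blocks instead.  Treat the exact call above as an illustration;
   the argument expressions follow conventions used elsewhere in GCC of
   this era, and the second and third arguments are in fact unused here.  */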

#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"

static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void make_edges			PARAMS ((rtx, basic_block,
						 basic_block, int));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, basic_block, rtx));
static void find_bb_boundaries		PARAMS ((basic_block));
static void compute_outgoing_frequencies PARAMS ((basic_block));
static bool inside_basic_block_p	PARAMS ((rtx));

/* Return true if INSN is something that should be contained inside a
   basic block.  */

static bool
inside_basic_block_p (insn)
     rtx insn;
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
	      || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
	      || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
		  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      abort ();
    }
}
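
/* Illustration (added comment, not from the original sources): a dispatch
   table for a C switch statement is emitted as a CODE_LABEL immediately
   followed by a JUMP_INSN whose pattern is an ADDR_VEC or ADDR_DIFF_VEC:

       L42:					;; CODE_LABEL
         (jump_insn (addr_vec [L10 L11 L12 ...]))

   Both the CODE_LABEL and JUMP_INSN cases above deliberately exclude this
   pair, so the table itself never becomes part of a basic block; only the
   case labels L10, L11, ... head real blocks.  The label numbers are of
   course arbitrary.  */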

/* Return true if INSN may cause a transfer of control flow, and hence
   should be the last insn in a basic block.  */

bool
control_flow_insn_p (insn)
     rtx insn;
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* Jump insn always causes control transfer except for tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* Call insn may return to the nonlocal goto handler.  */
      return ((nonlocal_goto_handler_labels
	       && (0 == (note = find_reg_note (insn, REG_EH_REGION,
					       NULL_RTX))
		   || INTVAL (XEXP (note, 0)) >= 0))
	      /* Or may trap.  */
	      || can_throw_internal (insn));

    case INSN:
      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the end of a
	 basic block, but before dead code is eliminated this may
	 happen.  */
      return false;

    default:
      abort ();
    }
}
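
/* Illustration (added comment, not from the original sources): with
   -fnon-call-exceptions, an ordinary INSN for which can_throw_internal
   returns true ends its basic block.  A CALL_INSN ends its block if it may
   throw to a handler in this function (can_throw_internal), or if the
   function has nonlocal goto handlers and the call does not carry a
   REG_EH_REGION note with a negative value; such a note guarantees the
   call performs no nonlocal goto (see the corresponding comment in
   make_edges below).  */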

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  int count = 0;
  bool saw_insn = false;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Code labels and barriers cause the current basic block to be
	 terminated at the previous real insn.  */
      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && saw_insn)
	count++, saw_insn = false;

      /* Start basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
	saw_insn = true;

      /* Control flow insn causes current basic block to be terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
	count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
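
/* Worked example (added comment, not from the original sources): for the
   insn stream

       CODE_LABEL L1, INSN, JUMP_INSN, BARRIER, CODE_LABEL L2, INSN

   the walk above counts two blocks: the JUMP_INSN is a control flow insn
   and closes the first block, the BARRIER is skipped because saw_insn is
   already false again, and the trailing INSN after L2 leaves saw_insn set,
   so the final "if (saw_insn) count++" accounts for the second block.  */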

/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
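
/* Illustration (added comment, not from the original sources): source code
   using GCC's computed goto extension, for example

       void *p = &&lab;
       ...
       goto *p;
     lab: ...

   loads the address of "lab" with a plain INSN carrying a REG_LABEL note.
   Scans like the one above (and the identical one in find_basic_blocks_1)
   put such labels on the label_value_list, so that make_edges can later
   treat every computed jump as potentially reaching them.  */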

/* Create an edge from basic block SRC to the block containing LABEL.
   FLAGS are auxiliary information about the edge that is accumulated
   between calls.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the exception handling edges generated by INSN in basic block
   SRC: one edge to each handler reachable from INSN.  */

static void
make_eh_edge (edge_cache, src, insn)
     sbitmap *edge_cache;
     basic_block src;
     rtx insn;
{
  int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
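
/* Illustration (added comment, not from the original sources):
   reachable_handlers returns an INSN_LIST of handler labels for the EH
   regions that enclose INSN.  For a CALL_INSN inside a single try region
   the loop above therefore adds one edge to the handler's label, flagged
   EDGE_ABNORMAL | EDGE_EH | EDGE_ABNORMAL_CALL; for a trapping non-call
   insn the EDGE_ABNORMAL_CALL bit is simply absent.  */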

/* Identify the edges between basic blocks MIN to MAX.

   LABEL_VALUE_LIST is a list of labels whose addresses are taken; together
   with forced_labels it tells us which labels a computed jump may reach.

   If UPDATE_P is nonzero, the blocks in the range may already have edges;
   those edges are entered into the edge cache so that we do not create
   duplicates of them.  */

static void
make_edges (label_value_list, min, max, update_p)
     rtx label_value_list;
     basic_block min, max;
     int update_p;
{
  basic_block bb;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list || cfun->max_jumptable_ents > 100)
    {
      edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
      sbitmap_vector_zero (edge_cache, last_basic_block);

      if (update_p)
	FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
	  {
	    edge e;

	    for (e = bb->succ; e; e = e->succ_next)
	      if (e->dest != EXIT_BLOCK_PTR)
		SET_BIT (edge_cache[bb->index], e->dest->index);
	  }
    }

  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb is
     always the first real block of the function, so if MIN starts there,
     wire up the fallthru edge from the entry block.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (bb->head) == CODE_LABEL && LABEL_ALT_ENTRY_P (bb->head))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (e.g., ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined
	 call and return, and so we need an edge to the exit block.  No need
	 to worry about EH edges, since we wouldn't have created the sibling
	 call in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
			  EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.
		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (bb->next_bb == EXIT_BLOCK_PTR && force_fallthru))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
	{
	  rtx tmp = bb->next_bb->head;
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
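
/* Illustration (added comment, not from the original sources): a block that
   ends in a simple conditional jump

       (jump_insn (set (pc) (if_then_else (cond) (label_ref L5) (pc))))

   falls into the final "plain conditional or unconditional jump" case, so
   it gets one edge to the block headed by L5 via make_label_edge, and the
   drop-through check at the bottom of the loop then adds the EDGE_FALLTHRU
   edge to bb->next_bb, because the next real insn is exactly the head of
   that block.  The label L5 is arbitrary.  */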

/* Find all basic blocks of the function whose first insn is F.

   Collect a list of labels whose addresses are taken and store it in
   label_value_list; make_edges uses it for computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && head)
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      if (inside_basic_block_p (insn))
	{
	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	}

      if (head && control_flow_insn_p (insn))
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	case JUMP_INSN:
	case INSN:
	case BARRIER:
	  break;

	case CALL_INSN:
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      /* Scan each of the alternatives for label refs.  */
	      lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
	      /* Record its tail recursion label, if any.  */
	      if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	    }
	  break;

	default:
	  abort ();
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  if (last_basic_block != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
  clear_aux_for_blocks ();
}

/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
	bb->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = 0;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb,
	      EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}

/* State of basic block as seen by find_sub_basic_blocks.  */
enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  rtx end = bb->end;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == bb->end)
    return;

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On code label, split current basic block.  */
      if (code == CODE_LABEL)
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  if (flow_transfer_insn)
	    bb->end = flow_transfer_insn;

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	  if (LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR, bb, 0);
	}

      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  bb->end = flow_transfer_insn;
	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminated
     by a return and a barrier, or by some other sequence that does not
     behave like an ordinary jump, we need to take care to move the basic
     block boundary.  */
  if (flow_transfer_insn)
    bb->end = flow_transfer_insn;

  /* We may have replaced a conditional jump by a conditional jump
     followed by cleanup code on the fallthru edge, so some of the
     outgoing edges may be dead.  */
  purge_dead_edges (bb);
}
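
/* Illustration (added comment, not from the original sources): suppose a
   pass turned the middle insn of the block

       L7:  insn1; call_that_can_throw; insn2

   into something that control_flow_insn_p now classifies as a flow
   transfer (here, a call with EH edges).  The loop above remembers the
   call in flow_transfer_insn and, on reaching insn2, splits the block
   right after the call, so insn2 starts a new block and the EH edges can
   be attached in the right place by a later make_edges pass.  The names
   are of course only for the example.  */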

/* Assume that frequency of basic block B is known.  Compute frequencies
   and probabilities of outgoing edges.  */

static void
compute_outgoing_frequencies (b)
     basic_block b;
{
  edge e, f;

  if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
    {
      rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
      int probability;

      if (!note)
	return;

      probability = INTVAL (XEXP (note, 0));
      e = BRANCH_EDGE (b);
      e->probability = probability;
      e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
		  / REG_BR_PROB_BASE);
      f = FALLTHRU_EDGE (b);
      f->probability = REG_BR_PROB_BASE - probability;
      f->count = b->count - e->count;
    }

  if (b->succ && !b->succ->succ_next)
    {
      e = b->succ;
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
    }
}
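
/* Worked example (added comment, not from the original sources): with
   REG_BR_PROB_BASE of 10000, a two-successor block whose REG_BR_PROB note
   reads 9000 and whose count is 1000 gives the branch edge probability
   9000 and count (1000 * 9000 + 5000) / 10000 = 900, and the fallthru
   edge probability 1000 and count 100.  The concrete numbers are made up
   for the illustration; only REG_BR_PROB_BASE is taken from the real
   headers.  */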

/* Assume that someone emitted code with control flow instructions into the
   basic blocks marked in BLOCKS.  Update the CFG data structures.  */

void
find_many_sub_basic_blocks (blocks)
     sbitmap blocks;
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
	       TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to have been created, with only forward edges.  */
  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      edge e;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;
      if (STATE (bb) == BLOCK_NEW)
	{
	  bb->count = 0;
	  bb->frequency = 0;
	  for (e = bb->pred; e; e = e->pred_next)
	    {
	      bb->count += e->count;
	      bb->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (bb);
    }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}

/* Like above, but for a single basic block only.  */

void
find_sub_basic_blocks (bb)
     basic_block bb;
{
  basic_block min, max, b;
  basic_block next = bb->next_bb;

  min = bb;
  find_bb_boundaries (bb);
  max = next->prev_bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to have been created, with only forward edges.  */
  FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
    {
      edge e;

      if (b != min)
	{
	  b->count = 0;
	  b->frequency = 0;
	  for (e = b->pred; e; e = e->pred_next)
	    {
	      b->count += e->count;
	      b->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (b);
    }
}
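
/* Usage sketch (added comment, not from the original sources): a pass that
   expands a single insn of block BB into a sequence containing new jumps,
   calls or labels would typically emit the new insns in place and then call

       find_sub_basic_blocks (bb);

   to split BB at the new boundaries and recompute its outgoing edges and
   probabilities; find_many_sub_basic_blocks does the same for every block
   whose bit is set in the given sbitmap.  */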