xref: /openbsd/gnu/usr.bin/gcc/gcc/cfglayout.c (revision c87b03e5)
1 /* Basic block reordering routines for the GNU compiler.
2    Copyright (C) 2000, 2001 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING.  If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "tree.h"
24 #include "rtl.h"
25 #include "hard-reg-set.h"
26 #include "basic-block.h"
27 #include "insn-config.h"
28 #include "output.h"
29 #include "function.h"
30 #include "obstack.h"
31 #include "cfglayout.h"
32 
33 /* The contents of the current function definition are allocated
34    in this obstack, and all are freed at the end of the function.  */
35 extern struct obstack flow_obstack;
36 
37 /* Holds the interesting trailing notes for the function.  */
38 static rtx function_footer;
39 
40 static rtx skip_insns_after_block	PARAMS ((basic_block));
41 static void record_effective_endpoints	PARAMS ((void));
42 static rtx label_for_bb			PARAMS ((basic_block));
43 static void fixup_reorder_chain		PARAMS ((void));
44 
45 static void set_block_levels		PARAMS ((tree, int));
46 static void change_scope		PARAMS ((rtx, tree, tree));
47 
48 void verify_insn_chain			PARAMS ((void));
49 static void cleanup_unconditional_jumps	PARAMS ((void));
50 static void fixup_fallthru_exit_predecessor PARAMS ((void));
51 static rtx unlink_insn_chain PARAMS ((rtx, rtx));
52 static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
53 
54 static rtx
unlink_insn_chain(first,last)55 unlink_insn_chain (first, last)
56      rtx first;
57      rtx last;
58 {
59   rtx prevfirst = PREV_INSN (first);
60   rtx nextlast = NEXT_INSN (last);
61 
62   PREV_INSN (first) = NULL;
63   NEXT_INSN (last) = NULL;
64   if (prevfirst)
65     NEXT_INSN (prevfirst) = nextlast;
66   if (nextlast)
67     PREV_INSN (nextlast) = prevfirst;
68   else
69     set_last_insn (prevfirst);
70   if (!prevfirst)
71     set_first_insn (nextlast);
72   return first;
73 }
74 
75 /* Skip over inter-block insns occurring after BB which are typically
76    associated with BB (e.g., barriers). If there are any such insns,
77    we return the last one. Otherwise, we return the end of BB.  */
78 
static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head, prev;

  /* Never scan into the next block's insns; remember where it starts.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = bb->next_bb->head;

  /* Walk forward from BB's end, extending LAST_INSN over trailing
     barriers, block/loop-end notes, and any dispatch table that follows
     its CODE_LABEL.  Deleted notes are stepped over but not claimed.  */
  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* A barrier after BB belongs to BB.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_LOOP_END:
	    case NOTE_INSN_BLOCK_END:
	      /* These close a region BB is part of; claim them.  */
	      last_insn = insn;
	      continue;
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* Step over without claiming.  */
	      continue;

	    default:
	      continue;
	      break;	/* NOTE(review): unreachable after continue.  */
	    }
	  break;

	case CODE_LABEL:
	  /* A label immediately followed by a jump table is the tail of
	     a tablejump belonging to BB; absorb label and table.  */
	  if (NEXT_INSN (insn)
	      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      /* Any other insn kind ends the scan.  */
      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != bb->end; insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	switch (NOTE_LINE_NUMBER (insn))
	  {
	  case NOTE_INSN_LOOP_END:
	  case NOTE_INSN_BLOCK_END:
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	    continue;
	  default:
	    /* Move the stray note after the insns that do belong here.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
164 
165 /* Locate or create a label for a given basic block.  */
166 
167 static rtx
label_for_bb(bb)168 label_for_bb (bb)
169      basic_block bb;
170 {
171   rtx label = bb->head;
172 
173   if (GET_CODE (label) != CODE_LABEL)
174     {
175       if (rtl_dump_file)
176 	fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
177 
178       label = block_label (bb);
179     }
180 
181   return label;
182 }
183 
184 /* Locate the effective beginning and end of the insn chain for each
185    block, as defined by skip_insns_after_block above.  */
186 
static void
record_effective_endpoints ()
{
  rtx next_insn = get_insns ();
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's effective end and this
	 block's head form this block's header; detach and save them.  */
      if (PREV_INSN (bb->head) && next_insn != bb->head)
	RBI (bb)->header = unlink_insn_chain (next_insn,
					      PREV_INSN (bb->head));
      /* Trailing barriers/notes associated with BB become its footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (bb->end) && bb->end != end)
	RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
      next_insn = NEXT_INSN (bb->end);
    }

  /* Whatever follows the last block is saved as the function footer.  */
  function_footer = next_insn;
  if (function_footer)
    function_footer = unlink_insn_chain (function_footer, get_last_insn ());
}
210 
211 /* Build a varray mapping INSN_UID to lexical block.  Return it.  */
212 
void
scope_to_insns_initialize ()
{
  tree block = NULL;
  rtx insn, next;

  /* Walk the insn stream, tracking the current lexical block as
     BLOCK_BEG/BLOCK_END notes are met, and record it in INSN_SCOPE of
     each active insn.  The notes themselves are deleted here and
     re-emitted by scope_to_insns_finalize after reordering.  */
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      /* Dispatch tables carry no useful scope information; skip them.  */
      if (active_insn_p (insn)
	  && GET_CODE (PATTERN (insn)) != ADDR_VEC
	  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
        INSN_SCOPE (insn) = block;
      else if (GET_CODE (insn) == NOTE)
	{
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_BLOCK_BEG:
	      /* Enter the nested block.  */
	      block = NOTE_BLOCK (insn);
	      delete_insn (insn);
	      break;
	    case NOTE_INSN_BLOCK_END:
	      /* Return to the enclosing block.  */
	      block = BLOCK_SUPERCONTEXT (block);
	      delete_insn (insn);
	      break;
	    default:
	      break;
	    }
	}
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
}
249 
250 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
251    found in the block tree.  */
252 
253 static void
set_block_levels(block,level)254 set_block_levels (block, level)
255      tree block;
256      int level;
257 {
258   while (block)
259     {
260       BLOCK_NUMBER (block) = level;
261       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
262       block = BLOCK_CHAIN (block);
263     }
264 }
265 
/* Return scope resulting from combination of S1 and S2.  */
267 tree
choose_inner_scope(s1,s2)268 choose_inner_scope (s1, s2)
269      tree s1, s2;
270 {
271    if (!s1)
272      return s2;
273    if (!s2)
274      return s1;
275    if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
276      return s1;
277    return s2;
278 }
279 
280 /* Emit lexical block notes needed to change scope from S1 to S2.  */
281 
static void
change_scope (orig_insn, s1, s2)
     rtx orig_insn;
     tree s1, s2;
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Find the common ancestor of S1 and S2 by repeatedly hoisting the
     deeper cursor (BLOCK_NUMBER is the depth assigned by
     set_block_levels) until the two meet.  */
  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
	abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  /* Equal depth but different blocks: hoist both.  */
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  Each new note is emitted before the previous one and
     INSN is moved back onto it, so the BLOCK_BEG notes end up in
     outermost-first order in the insn stream.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
326 
327 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
328    on the scope tree and the newly reordered instructions.  */
329 
void
scope_to_insns_finalize ()
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  /* Walk the active insns in their (possibly reordered) stream order,
     emitting scope notes wherever INSN_SCOPE changes.  */
  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = INSN_SCOPE (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
			    		 INSN_SCOPE (XVECEXP (body, 0, i)));
	}
      /* Insns with no recorded scope stay in the current one.  */
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  /* Emit the BLOCK_END/BLOCK_BEG notes for the transition.  */
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NULL, NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
373 
374 /* Given a reorder chain, rearrange the code to match.  */
375 
static void
fixup_reorder_chain ()
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = RBI (bb)->next, index++)
    {
      /* Re-attach the block's saved header insns, then the block's own
	 insns, then its saved footer, keeping INSN at the chain tail.  */
      if (RBI (bb)->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = RBI (bb)->header;
	  else
	    set_first_insn (RBI (bb)->header);
	  PREV_INSN (RBI (bb)->header) = insn;
	  insn = RBI (bb)->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = bb->head;
      else
	set_first_insn (bb->head);
      PREV_INSN (bb->head) = insn;
      insn = bb->end;
      if (RBI (bb)->footer)
	{
	  NEXT_INSN (insn) = RBI (bb)->footer;
	  PREV_INSN (RBI (bb)->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  /* Every basic block must appear in the reorder chain exactly once.  */
  if (index != n_basic_blocks)
    abort ();

  /* Re-attach the saved function footer and fix the chain tail.  */
  NEXT_INSN (insn) = function_footer;
  if (function_footer)
    PREV_INSN (function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (RBI (bb)->next == e_fall->dest
	          || (!RBI (bb)->next
		      && e_fall->dest == EXIT_BLOCK_PTR))
		continue;

	      if (!e_taken)
		e_taken = e_fall;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen on target having jumps with side
		 effects.

		 Create temporarily the duplicated edge representing branch.
		 It will get unidentified by force_nonfallthru_and_redirect
		 that would otherwise get confused by fallthru edge not pointing
		 to the next basic block.  */
	      /* NOTE(review): this branch is unreachable -- E_TAKEN was
		 assigned E_FALL just above whenever it was NULL, so the
		 test below can never succeed.  Confirm the intended guard
		 against upstream GCC, which tests the degenerate case
		 differently.  */
	      if (!e_taken)
		{
		  rtx note;
		  edge e_fake;

		  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);

		  note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
		  if (note)
		    {
		      int prob = INTVAL (XEXP (note, 0));

		      e_fake->probability = prob;
		      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
		      /* NOTE(review): this zeroes e_fall->probability
			 unconditionally; presumably subtracting
			 e_fake->probability was meant -- verify.  */
		      e_fall->probability -= e_fall->probability;
		      e_fall->count -= e_fake->count;
		      if (e_fall->probability < 0)
			e_fall->probability = 0;
		      if (e_fall->count < 0)
			e_fall->count = 0;
		    }
		}
	      /* There is one special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (RBI (bb)->next != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  /* Invert when the branch target is the less likely one,
		     so the likely path becomes the fallthru.  */
		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      label_for_bb (e_fall->dest), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    label_for_bb (e_fall->dest), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else if (returnjump_p (bb_end_insn))
	    continue;
	  else
	    {
	      /* Otherwise we have some switch or computed jump.  In the
		 99% case, there should not have been a fallthru edge.  */
	      if (! e_fall)
		continue;

#ifdef CASE_DROPS_THROUGH
	      /* Except for VAX.  Since we didn't have predication for the
		 tablejump, the fallthru block should not have moved.  */
	      if (RBI (bb)->next == e_fall->dest)
		continue;
	      bb_end_insn = skip_insns_after_block (bb);
#else
	      abort ();
#endif
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (RBI (bb)->next == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
	  RBI (nb)->visited = 1;
	  RBI (nb)->next = RBI (bb)->next;
	  RBI (bb)->next = nb;
	  /* Don't process this new block.  */
	  bb = nb;
	}
    }

  /* Put basic_block_info in the new order.  */

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = RBI (bb)->next, index ++)
	{
	  fprintf (rtl_dump_file, " %i ", index);
	  if (RBI (bb)->original)
	    fprintf (rtl_dump_file, "duplicate of %i ",
		     RBI (bb)->original->index);
	  else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
	    fprintf (rtl_dump_file, "compensation ");
	  else
	    fprintf (rtl_dump_file, "bb %i ", bb->index);
	  fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Renumber the blocks and rebuild prev_bb/next_bb links to match the
     reorder chain.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = RBI (bb)->next, index ++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;
}
611 
612 /* Perform sanity checks on the insn chain.
613    1. Check that next/prev pointers are consistent in both the forward and
614       reverse direction.
615    2. Count insns in chain, going both directions, and check if equal.
616    3. Check that get_last_insn () returns the actual end of chain.  */
617 
618 void
verify_insn_chain()619 verify_insn_chain ()
620 {
621   rtx x, prevx, nextx;
622   int insn_cnt1, insn_cnt2;
623 
624   for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
625        x != 0;
626        prevx = x, insn_cnt1++, x = NEXT_INSN (x))
627     if (PREV_INSN (x) != prevx)
628       abort ();
629 
630   if (prevx != get_last_insn ())
631     abort ();
632 
633   for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
634        x != 0;
635        nextx = x, insn_cnt2++, x = PREV_INSN (x))
636     if (NEXT_INSN (x) != nextx)
637       abort ();
638 
639   if (insn_cnt1 != insn_cnt2)
640     abort ();
641 }
642 
643 /* Remove any unconditional jumps and forwarder block creating fallthru
644    edges instead.  During BB reordering, fallthru edges are not required
645    to target next basic block in the linear CFG layout, so the unconditional
646    jumps are not needed.  */
647 
static void
cleanup_unconditional_jumps ()
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      if (!bb->succ)
	continue;
      if (bb->succ->flags & EDGE_FALLTHRU)
	continue;
      /* Only blocks with a single non-fallthru successor qualify.  */
      if (!bb->succ->succ_next)
	{
	  rtx insn;
	  if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
	      && bb->prev_bb != ENTRY_BLOCK_PTR)
	    {
	      /* An unlabeled forwarder block can be deleted outright,
		 retargeting its predecessor edge at its destination.  */
	      basic_block prev = bb->prev_bb;

	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
			 bb->index);

	      redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
	      flow_delete_block (bb);
	      /* Resume iteration from the block before the deleted one.  */
	      bb = prev;
	    }
	  else if (simplejump_p (bb->end))
	    {
	      /* An unconditional jump is redundant in cfglayout mode:
		 delete it and mark its edge as fallthru instead.  */
	      rtx jump = bb->end;

	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
			 INSN_UID (jump), bb->index);
	      delete_insn (jump);
	      bb->succ->flags |= EDGE_FALLTHRU;
	    }
	  else
	    continue;

	  /* Delete any barriers that followed the removed jump or block,
	     up to the next basic block note.  */
	  insn = NEXT_INSN (bb->end);
	  while (insn
		 && (GET_CODE (insn) != NOTE
		     || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
	    {
	      rtx next = NEXT_INSN (insn);

	      if (GET_CODE (insn) == BARRIER)
		delete_barrier (insn);

	      insn = next;
	    }
	}
    }
}
703 
704 /* The block falling through to exit must be the last one in the
705    reordered chain.  Ensure that this condition is met.  */
static void
fixup_fallthru_exit_predecessor ()
{
  edge e;
  basic_block bb = NULL;

  /* Find the block (if any) that falls through to the exit block.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  /* If it is not already last in the reorder chain, move it there.  */
  if (bb && RBI (bb)->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* Find BB's predecessor in the chain...  */
      while (RBI (c)->next != bb)
	c = RBI (c)->next;

      /* ...splice BB out and walk to the chain's current tail...  */
      RBI (c)->next = RBI (bb)->next;
      while (RBI (c)->next)
	c = RBI (c)->next;

      /* ...and append BB as the new last block.  */
      RBI (c)->next = bb;
      RBI (bb)->next = NULL;
    }
}
731 
732 /* Return true in case it is possible to duplicate the basic block BB.  */
733 
bool
cfg_layout_can_duplicate_bb_p (bb)
     basic_block bb;
{
  rtx next;
  edge s;

  /* The entry and exit blocks can never be duplicated.  */
  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
    return false;

  /* Duplicating fallthru block to exit would require adding a jump
     and splitting the real last BB.  */
  for (s = bb->succ; s; s = s->succ_next)
    if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
       return false;

  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (GET_CODE (bb->end) == JUMP_INSN && JUMP_LABEL (bb->end)
      && (next = next_nonnote_insn (JUMP_LABEL (bb->end)))
      && GET_CODE (next) == JUMP_INSN
      && (GET_CODE (PATTERN (next)) == ADDR_VEC
	  || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
    return false;
  return true;
}
761 
/* Emit a copy of the insns FROM through TO (inclusive) at the end of the
   current insn chain and return the first insn of the copy.  Labels and
   dispatch tables are not copied; notes are copied selectively as
   annotated below.  */

static rtx
duplicate_insn_chain (from, to)
     rtx from, to;
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NULL, NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      rtx new;
      switch (GET_CODE (insn))
	{
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    break;
	  /* NEW is currently unused; the copy's value is reached via
	     get_last_insn by the caller.  */
	  new = emit_copy_of_insn_after (insn, get_last_insn ());
	  break;

	case CODE_LABEL:
	  /* Labels are never copied; jumps into the copy are fixed up
	     by the caller.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	      /* In case prologue is empty and function contain label
	         in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_LOOP_VTOP:
	    case NOTE_INSN_LOOP_CONT:
	    case NOTE_INSN_LOOP_BEG:
	    case NOTE_INSN_LOOP_END:
	      /* Strip down the loop notes - we don't really want to keep
	         them consistent in loop copies.  */
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_FUNCTION_END:
	      /* Debug code expect these notes to exist just once.
	         Keep them in the master copy.
	         ??? It probably makes more sense to duplicate them for each
	         epilogue copy.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	      /* There is no purpose to duplicate prologue.  */
	    case NOTE_INSN_BLOCK_BEG:
	    case NOTE_INSN_BLOCK_END:
	      /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
	         reordering is in the progress.  */
	    case NOTE_INSN_EH_REGION_BEG:
	    case NOTE_INSN_EH_REGION_END:
	      /* Should never exist at BB duplication time.  */
	      abort ();
	      break;
	    case NOTE_INSN_REPEATED_LINE_NUMBER:
	      emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
	      break;

	    default:
	      /* Positive line numbers are ordinary source-line notes.  */
	      if (NOTE_LINE_NUMBER (insn) < 0)
		abort ();
	      /* It is possible that no_line_number is set and the note
	         won't be emitted.  */
	      emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
	    }
	  break;
	default:
	  abort ();
	}
    }
  /* The copy starts right after the sentinel note, which is deleted.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
855 
/* Redirect edge E so that it points at basic block DEST, keeping the
   insn stream free of simplejumps as cfglayout mode requires.  */
void
cfg_layout_redirect_edge (e, dest)
     edge e;
     basic_block dest;
{
  basic_block src = e->src;
  basic_block old_next_bb = src->next_bb;

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  src->next_bb = NULL;
  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (GET_CODE (src->end) == JUMP_INSN
	  && JUMP_LABEL (src->end) == e->dest->head)
	{
          if (!redirect_jump (src->end, block_label (dest), 0))
	    abort ();
	}
      /* In case we are redirecting fallthru edge to the branch edge
         of conditional jump, remove it.  */
      if (src->succ->succ_next
	  && !src->succ->succ_next->succ_next)
	{
	  /* S is the other (branch) edge out of SRC.  */
	  edge s = e->succ_next ? e->succ_next : src->succ;
	  if (s->dest == dest
	      && any_condjump_p (src->end)
	      && onlyjump_p (src->end))
	    delete_insn (src->end);
	}
      redirect_edge_succ_nodup (e, dest);
    }
  else
    redirect_edge_and_branch (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  if (simplejump_p (src->end))
    {
      /* NOTE(review): src->end is read again after delete_insn below;
	 this relies on delete_insn keeping the deleted insn's chain
	 links / updating the block end -- confirm against delete_insn
	 semantics in cfgrtl.c.  */
      delete_insn (src->end);
      delete_barrier (NEXT_INSN (src->end));
      src->succ->flags |= EDGE_FALLTHRU;
    }
  src->next_bb = old_next_bb;
}
904 
905 /* Create a duplicate of the basic block BB and redirect edge E into it.  */
906 
basic_block
cfg_layout_duplicate_bb (bb, e)
     basic_block bb;
     edge e;
{
  rtx insn;
  edge s, n;
  basic_block new_bb;
  /* The copy inherits at most E's execution count, capped by BB's.  */
  gcov_type new_count = e ? e->count : 0;

  if (bb->count < new_count)
    new_count = bb->count;
  if (!bb->pred)
    abort ();
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_duplicate_bb_p (bb))
    abort ();
#endif

  /* Copy BB's insns to the end of the chain and wrap a new block
     around them, placed just before the exit block.  */
  insn = duplicate_insn_chain (bb->head, bb->end);
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR->prev_bb);
  alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));

  /* Duplicate the saved header and footer insn sequences as well.  */
  if (RBI (bb)->header)
    {
      insn = RBI (bb)->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->header, insn);
      if (insn)
	RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (RBI (bb)->footer)
    {
      insn = RBI (bb)->footer;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->footer, insn);
      if (insn)
	RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  /* Copy liveness information when available.  */
  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  new_bb->loop_depth = bb->loop_depth;
  new_bb->flags = bb->flags;
  /* Mirror BB's outgoing edges, splitting the profile counts
     proportionally between the original and the copy.  */
  for (s = bb->succ; s; s = s->succ_next)
    {
      n = make_edge (new_bb, s->dest, s->flags);
      n->probability = s->probability;
      if (new_count)
	/* Take care for overflows!  */
	n->count = s->count * (new_count * 10000 / bb->count) / 10000;
      else
	n->count = 0;
      s->count -= n->count;
    }

  new_bb->count = new_count;
  bb->count -= new_count;

  if (e)
    {
      new_bb->frequency = EDGE_FREQUENCY (e);
      bb->frequency -= EDGE_FREQUENCY (e);

      cfg_layout_redirect_edge (e, new_bb);
    }

  /* Clamp against rounding artifacts in the count/frequency split.  */
  if (bb->count < 0)
    bb->count = 0;
  if (bb->frequency < 0)
    bb->frequency = 0;

  /* Remember the original so dumps can identify the duplicate.  */
  RBI (new_bb)->original = bb;
  return new_bb;
}
993 
/* Main entry point to this module - initialize the data structures for
   CFG layout changes.  */
996 
void
cfg_layout_initialize ()
{
  /* Our algorithm depends on fact that there are no dead jumptables
     around the code.  */
  alloc_aux_for_blocks (sizeof (struct reorder_block_def));

  cleanup_unconditional_jumps ();

  record_effective_endpoints ();
}
1008 
1009 /* Finalize the changes: reorder insn list according to the sequence, enter
1010    compensation code, rebuild scope forest.  */
1011 
void
cfg_layout_finalize ()
{
  /* Make the fallthru-to-exit block last, then linearize the insn
     stream to match the chosen block order.  */
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Release the per-block reorder information allocated in
     cfg_layout_initialize.  */
  free_aux_for_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
1028