1 /* Control flow graph manipulation code for GNU compiler.
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 /* This file contains low level functions to manipulate the CFG and analyze it
23    that are aware of the RTL intermediate language.
24 
25    Available functionality:
26      - Basic CFG/RTL manipulation API documented in cfghooks.h
27      - CFG-aware instruction chain manipulation
28 	 delete_insn, delete_insn_chain
29      - Edge splitting and committing to edges
30 	 insert_insn_on_edge, commit_edge_insertions
31      - CFG updating after insn simplification
32 	 purge_dead_edges, purge_all_dead_edges
33 
   Functions not intended for generic use:
35      - Infrastructure to determine quickly basic block for insn
36 	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37      - Edge redirection with updating and optimizing of insn chain
38 	 block_label, tidy_fallthru_edge, force_nonfallthru  */
39 
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "tm.h"
44 #include "tree.h"
45 #include "rtl.h"
46 #include "hard-reg-set.h"
47 #include "basic-block.h"
48 #include "regs.h"
49 #include "flags.h"
50 #include "output.h"
51 #include "function.h"
52 #include "except.h"
53 #include "toplev.h"
54 #include "tm_p.h"
55 #include "obstack.h"
56 #include "insn-config.h"
57 #include "cfglayout.h"
58 #include "expr.h"
59 
60 /* Stubs in case we don't have a return insn.  */
61 #ifndef HAVE_return
62 #define HAVE_return 0
63 #define gen_return() NULL_RTX
64 #endif
65 
66 /* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
67 /* ??? Should probably be using LABEL_NUSES instead.  It would take a
68    bit of surgery to be able to use or co-opt the routines in jump.  */
69 rtx label_value_list;
70 rtx tail_recursion_label_list;
71 
72 static int can_delete_note_p (rtx);
73 static int can_delete_label_p (rtx);
74 static void commit_one_edge_insertion (edge, int);
75 static rtx last_loop_beg_note (rtx);
76 static bool back_edge_of_syntactic_loop_p (basic_block, basic_block);
77 basic_block force_nonfallthru_and_redirect (edge, basic_block);
78 static basic_block rtl_split_edge (edge);
79 static int rtl_verify_flow_info (void);
80 static edge cfg_layout_split_block (basic_block, void *);
81 static bool cfg_layout_redirect_edge_and_branch (edge, basic_block);
82 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
83 static void cfg_layout_delete_block (basic_block);
84 static void rtl_delete_block (basic_block);
85 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
86 static bool rtl_redirect_edge_and_branch (edge, basic_block);
87 static edge rtl_split_block (basic_block, void *);
88 static void rtl_dump_bb (basic_block, FILE *);
89 static int rtl_verify_flow_info_1 (void);
90 static void mark_killed_regs (rtx, rtx, void *);
91 
92 /* Return true if NOTE is not one of the ones that must be kept paired,
93    so that we may simply delete it.  */
94 
95 static int
can_delete_note_p(rtx note)96 can_delete_note_p (rtx note)
97 {
98   return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
99 	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
100 	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
101 }
102 
103 /* True if a given label can be deleted.  */
104 
105 static int
can_delete_label_p(rtx label)106 can_delete_label_p (rtx label)
107 {
108   return (!LABEL_PRESERVE_P (label)
109 	  /* User declared labels must be preserved.  */
110 	  && LABEL_NAME (label) == 0
111 	  && !in_expr_list_p (forced_labels, label)
112 	  && !in_expr_list_p (label_value_list, label));
113 }
114 
/* Delete INSN by patching it out of the insn chain.  Return the insn
   that followed INSN before deletion.

   Labels that cannot safely be removed (still referenced from variables,
   the constant pool, etc.) are not unlinked; they are converted in place
   to NOTE_INSN_DELETED_LABEL notes.  Label use counts (LABEL_NUSES) of
   any labels INSN referenced are decremented.  */

rtx
delete_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (GET_CODE (insn) == CODE_LABEL)
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be references via variables, constant pool etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (insn) = name;
	}

      /* A deleted label is never a nonlocal goto handler target.  */
      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      if (INSN_DELETED_P (insn))
	abort ();
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn)
      && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else
    {
      while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
	     && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* A jump table going away releases one use of each label it targets.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (GET_CODE (label) != NOTE)
	    LABEL_NUSES (label)--;
	}
    }

  return next;
}
192 
193 /* Like delete_insn but also purge dead edges from BB.  */
194 rtx
delete_insn_and_edges(rtx insn)195 delete_insn_and_edges (rtx insn)
196 {
197   rtx x;
198   bool purge = false;
199 
200   if (INSN_P (insn)
201       && BLOCK_FOR_INSN (insn)
202       && BB_END (BLOCK_FOR_INSN (insn)) == insn)
203     purge = true;
204   x = delete_insn (insn);
205   if (purge)
206     purge_dead_edges (BLOCK_FOR_INSN (insn));
207   return x;
208 }
209 
210 /* Unlink a chain of insns between START and FINISH, leaving notes
211    that must be paired.  */
212 
213 void
delete_insn_chain(rtx start,rtx finish)214 delete_insn_chain (rtx start, rtx finish)
215 {
216   rtx next;
217 
218   /* Unchain the insns one by one.  It would be quicker to delete all of these
219      with a single unchaining, rather than one at a time, but we need to keep
220      the NOTE's.  */
221   while (1)
222     {
223       next = NEXT_INSN (start);
224       if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
225 	;
226       else
227 	next = delete_insn (start);
228 
229       if (start == finish)
230 	break;
231       start = next;
232     }
233 }
234 
/* Like delete_insn_chain but also purge dead edges from the block
   containing LAST.  */
236 void
delete_insn_chain_and_edges(rtx first,rtx last)237 delete_insn_chain_and_edges (rtx first, rtx last)
238 {
239   bool purge = false;
240 
241   if (INSN_P (last)
242       && BLOCK_FOR_INSN (last)
243       && BB_END (BLOCK_FOR_INSN (last)) == last)
244     purge = true;
245   delete_insn_chain (first, last);
246   if (purge)
247     purge_dead_edges (BLOCK_FOR_INSN (last));
248 }
249 
250 /* Create a new basic block consisting of the instructions between HEAD and END
251    inclusive.  This function is designed to allow fast BB construction - reuses
252    the note and basic block struct in BB_NOTE, if any and do not grow
253    BASIC_BLOCK chain and should be used directly only by CFG construction code.
254    END can be NULL in to create new empty basic block before HEAD.  Both END
255    and HEAD can be NULL to create basic block at the end of INSN chain.
256    AFTER is the basic block we should be put after.  */
257 
258 basic_block
create_basic_block_structure(rtx head,rtx end,rtx bb_note,basic_block after)259 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
260 {
261   basic_block bb;
262 
263   if (bb_note
264       && ! RTX_INTEGRATED_P (bb_note)
265       && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
266       && bb->aux == NULL)
267     {
268       /* If we found an existing note, thread it back onto the chain.  */
269 
270       rtx after;
271 
272       if (GET_CODE (head) == CODE_LABEL)
273 	after = head;
274       else
275 	{
276 	  after = PREV_INSN (head);
277 	  head = bb_note;
278 	}
279 
280       if (after != bb_note && NEXT_INSN (after) != bb_note)
281 	reorder_insns_nobb (bb_note, bb_note, after);
282     }
283   else
284     {
285       /* Otherwise we must create a note and a basic block structure.  */
286 
287       bb = alloc_block ();
288 
289       if (!head && !end)
290 	head = end = bb_note
291 	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
292       else if (GET_CODE (head) == CODE_LABEL && end)
293 	{
294 	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
295 	  if (head == end)
296 	    end = bb_note;
297 	}
298       else
299 	{
300 	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
301 	  head = bb_note;
302 	  if (!end)
303 	    end = head;
304 	}
305 
306       NOTE_BASIC_BLOCK (bb_note) = bb;
307     }
308 
309   /* Always include the bb note in the block.  */
310   if (NEXT_INSN (end) == bb_note)
311     end = bb_note;
312 
313   BB_HEAD (bb) = head;
314   BB_END (bb) = end;
315   bb->index = last_basic_block++;
316   bb->flags = BB_NEW;
317   link_block (bb, after);
318   BASIC_BLOCK (bb->index) = bb;
319   update_bb_for_insn (bb);
320 
321   /* Tag the block so that we know it has been used when considering
322      other basic block notes.  */
323   bb->aux = bb;
324 
325   return bb;
326 }
327 
328 /* Create new basic block consisting of instructions in between HEAD and END
329    and place it to the BB chain after block AFTER.  END can be NULL in to
330    create new empty basic block before HEAD.  Both END and HEAD can be NULL to
331    create basic block at the end of INSN chain.  */
332 
333 static basic_block
rtl_create_basic_block(void * headp,void * endp,basic_block after)334 rtl_create_basic_block (void *headp, void *endp, basic_block after)
335 {
336   rtx head = headp, end = endp;
337   basic_block bb;
338 
339   /* Place the new block just after the end.  */
340   VARRAY_GROW (basic_block_info, last_basic_block+1);
341 
342   n_basic_blocks++;
343 
344   bb = create_basic_block_structure (head, end, NULL, after);
345   bb->aux = NULL;
346   return bb;
347 }
348 
349 static basic_block
cfg_layout_create_basic_block(void * head,void * end,basic_block after)350 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
351 {
352   basic_block newbb = rtl_create_basic_block (head, end, after);
353 
354   cfg_layout_initialize_rbi (newbb);
355   return newbb;
356 }
357 
/* Delete the insns in a (non-live) block B.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
     hanging before the block.  */

  for (insn = PREV_INSN (BB_HEAD (b)); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE)
	break;
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
	  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
	NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
    }

  insn = BB_HEAD (b);

  /* Warn the user about code that will never execute.  */
  never_reached_warning (insn, BB_END (b));

  if (GET_CODE (insn) == CODE_LABEL)
    maybe_remove_eh_handler (insn);

  /* Include any jump table following the basic block.  */
  end = BB_END (b);
  if (tablejump_p (end, NULL, &tmp))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end);

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  while (b->pred != NULL)
    remove_edge (b->pred);
  while (b->succ != NULL)
    remove_edge (b->succ);

  b->pred = NULL;
  b->succ = NULL;

  /* Remove the basic block from the array.  */
  expunge_block (b);
}
424 
425 /* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */
426 
427 void
compute_bb_for_insn(void)428 compute_bb_for_insn (void)
429 {
430   basic_block bb;
431 
432   FOR_EACH_BB (bb)
433     {
434       rtx end = BB_END (bb);
435       rtx insn;
436 
437       for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
438 	{
439 	  BLOCK_FOR_INSN (insn) = bb;
440 	  if (insn == end)
441 	    break;
442 	}
443     }
444 }
445 
446 /* Release the basic_block_for_insn array.  */
447 
448 void
free_bb_for_insn(void)449 free_bb_for_insn (void)
450 {
451   rtx insn;
452   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
453     if (GET_CODE (insn) != BARRIER)
454       BLOCK_FOR_INSN (insn) = NULL;
455 }
456 
457 /* Update insns block within BB.  */
458 
459 void
update_bb_for_insn(basic_block bb)460 update_bb_for_insn (basic_block bb)
461 {
462   rtx insn;
463 
464   for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
465     {
466       if (GET_CODE (insn) != BARRIER)
467 	set_block_for_insn (insn, bb);
468       if (insn == BB_END (bb))
469 	break;
470     }
471 }
472 
/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */

static edge
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx insn = insnp;

  /* There is no point splitting the block after its end.  */
  if (BB_END (bb) == insn)
    return 0;

  /* Create the new basic block holding the second half of BB's insns.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  new_bb->count = bb->count;
  new_bb->frequency = bb->frequency;
  new_bb->loop_depth = bb->loop_depth;
  BB_END (bb) = insn;

  /* Redirect the outgoing edges: all of BB's successors now leave
     from NEW_BB.  */
  new_bb->succ = bb->succ;
  bb->succ = NULL;
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  /* BB itself falls through into the new block.  */
  new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);

  /* If liveness information is available, split it between the halves.  */
  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
#ifdef HAVE_conditional_execution
      /* In the presence of conditional execution we are not able to update
	 liveness precisely.  */
      if (reload_completed)
	{
	  bb->flags |= BB_DIRTY;
	  new_bb->flags |= BB_DIRTY;
	}
#endif
    }

  return new_edge;
}
533 
/* Assume that the code of basic block B has been merged into A.
   Do corresponding CFG updates:  redirect edges accordingly etc.
   Afterwards B is expunged: its edge lists are cleared and the block
   is removed from the basic block array.  */
static void
update_cfg_after_block_merging (basic_block a, basic_block b)
{
  edge e;

  /* Normally there should only be one successor of A and that is B, but
     partway though the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
  while (a->succ)
    remove_edge (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;
  a->flags |= b->flags;

  /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
  b->pred = b->succ = NULL;
  /* A inherits B's live-at-end set, since A's code now ends where
     B's did.  */
  a->global_live_at_end = b->global_live_at_end;

  expunge_block (b);
}
560 
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  Deletes B's leading label and basic-block
   note, any jump (and barrier) at the end of A, and reassigns B's
   insns to A.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      /* Find the insn before the jump, skipping interior notes (but
	 stopping at the basic block note or the block head).  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (GET_CODE (prev) != NOTE
	    || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
    del_first = NEXT_INSN (a_end);

  update_cfg_after_block_merging (a, b);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  delete_insn_chain (del_first, del_last);

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      rtx x;

      for (x = a_end; x != b_end; x = NEXT_INSN (x))
	set_block_for_insn (x, a);

      set_block_for_insn (b_end, a);

      a_end = b_end;
    }

  BB_END (a) = a_end;
}
649 
650 /* Return true when block A and B can be merged.  */
651 static bool
rtl_can_merge_blocks(basic_block a,basic_block b)652 rtl_can_merge_blocks (basic_block a,basic_block b)
653 {
654   /* There must be exactly one edge in between the blocks.  */
655   return (a->succ && !a->succ->succ_next && a->succ->dest == b
656 	  && !b->pred->pred_next && a != b
657 	  /* Must be simple edge.  */
658 	  && !(a->succ->flags & EDGE_COMPLEX)
659 	  && a->next_bb == b
660 	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
661 	  /* If the jump insn has side effects,
662 	     we can't kill the edge.  */
663 	  && (GET_CODE (BB_END (a)) != JUMP_INSN
664 	      || (reload_completed
665 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
666 }
667 
668 /* Return the label in the head of basic block BLOCK.  Create one if it doesn't
669    exist.  */
670 
671 rtx
block_label(basic_block block)672 block_label (basic_block block)
673 {
674   if (block == EXIT_BLOCK_PTR)
675     return NULL_RTX;
676 
677   if (GET_CODE (BB_HEAD (block)) != CODE_LABEL)
678     {
679       BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
680     }
681 
682   return BB_HEAD (block);
683 }
684 
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.

   E is the edge to redirect, TARGET the desired destination block, and
   IN_CFGLAYOUT is true when operating in cfglayout mode (where fallthru
   edges need not go to the physically next block).  */

bool
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  edge tmp;
  rtx set;
  int fallthru = 0;

  /* Verify that all targets will be TARGET.  */
  for (tmp = src->succ; tmp; tmp = tmp->succ_next)
    if (tmp->dest != target && tmp != e)
      break;

  if (tmp || !onlyjump_p (insn))
    return false;
  /* Keep tablejumps intact after register allocation (or when not
     optimizing); removing them would require reconstructing the table.  */
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return false;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  /* NOTE(review): this local `insn' shadows the outer `insn';
	     it walks SRC's footer chain, not the insn stream.  */
	  rtx insn = src->rbi->footer;

          delete_insn_chain (kill_from, BB_END (src));

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (GET_CODE (insn) == BARRIER)
		{
		  /* Unlink the barrier from the footer chain by hand.  */
		  if (PREV_INSN (insn))
		    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    src->rbi->footer = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (GET_CODE (insn) == CODE_LABEL)
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
        delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return false;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  /* Redirecting to the exit block may legitimately fail;
	     anything else means the backend is confused.  */
	  if (target == EXIT_BLOCK_PTR)
	    return false;
	  abort ();
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return false;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table);

      /* Make sure the new jump is followed by a barrier, creating one
	 if next_nonnote_insn does not find one.  */
      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || GET_CODE (barrier) != BARRIER)
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx new_insn = BB_END (src);
	      rtx tmp;

	      for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
		   tmp = NEXT_INSN (tmp))
		set_block_for_insn (tmp, src);

	      /* Splice NEW_INSN out of its current position...  */
	      NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      /* ...and relink it immediately before the barrier.  */
	      NEXT_INSN (new_insn) = barrier;
	      NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      PREV_INSN (new_insn) = PREV_INSN (barrier);
	      PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  while (src->succ->succ_next)
    remove_edge (src->succ);
  e = src->succ;
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  /* We don't want a block to end on a line-number note since that has
     the potential of changing the code between -g and not -g.  */
  while (GET_CODE (BB_END (e->src)) == NOTE
	 && NOTE_LINE_NUMBER (BB_END (e->src)) >= 0)
    delete_insn (BB_END (e->src));

  if (e->dest != target)
    redirect_edge_succ (e, target);

  return true;
}
851 
852 /* Return last loop_beg note appearing after INSN, before start of next
853    basic block.  Return INSN if there are no such notes.
854 
855    When emitting jump to redirect a fallthru edge, it should always appear
856    after the LOOP_BEG notes, as loop optimizer expect loop to either start by
857    fallthru edge or jump following the LOOP_BEG note jumping to the loop exit
858    test.  */
859 
860 static rtx
last_loop_beg_note(rtx insn)861 last_loop_beg_note (rtx insn)
862 {
863   rtx last = insn;
864 
865   for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
866        && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
867        insn = NEXT_INSN (insn))
868     if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
869       last = insn;
870 
871   return last;
872 }
873 
/* Redirect edge E, representing the branch of an (un)conditional jump or
   tablejump, to TARGET.  Return true on success.  Fallthru edges, computed
   jumps and return jumps cannot be redirected this way.  */
static bool
redirect_branch_edge (edge e, basic_block target)
{
  rtx tmp;
  rtx old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return false;
  else if (GET_CODE (insn) != JUMP_INSN)
    return false;

  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (target == EXIT_BLOCK_PTR)
	return false;
      /* The label vector is operand 0 of an ADDR_VEC and operand 1
	 of an ADDR_DIFF_VEC.  */
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      /* Retarget every vector entry that pointed at the old label.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      /* If the insn doesn't go where we think, we're confused.  */
      if (JUMP_LABEL (insn) != old_label)
	abort ();

      /* If the substitution doesn't succeed, die.  This can happen
	 if the back end emitted unrecognizable instructions or if
	 target is exit block on some arches.  */
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  if (target == EXIT_BLOCK_PTR)
	    return false;
	  abort ();
	}
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    redirect_edge_succ_nodup (e, target);
  return true;
}
957 
/* Attempt to change code to redirect edge E to TARGET.  Don't do that at the
   expense of adding new instructions or reordering basic blocks.

   The function can also be called with an edge destination equivalent to
   TARGET.  Then it should try the simplifications and do nothing if none
   is possible.

   Return true if the transformation succeeded.  We still return false in
   case E already pointed to TARGET and we didn't manage to simplify the
   instruction stream.  */
967 
968 static bool
rtl_redirect_edge_and_branch(edge e,basic_block target)969 rtl_redirect_edge_and_branch (edge e, basic_block target)
970 {
971   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
972     return false;
973 
974   if (e->dest == target)
975     return true;
976 
977   if (try_redirect_by_replacing_jump (e, target, false))
978     return true;
979 
980   if (!redirect_branch_edge (e, target))
981     return false;
982 
983   return true;
984 }
985 
986 /* Like force_nonfallthru below, but additionally performs redirection
987    Used by redirect_edge_and_branch_force.  */
988 
989 basic_block
force_nonfallthru_and_redirect(edge e,basic_block target)990 force_nonfallthru_and_redirect (edge e, basic_block target)
991 {
992   basic_block jump_block, new_bb = NULL, src = e->src;
993   rtx note;
994   edge new_edge;
995   int abnormal_edge_flags = 0;
996 
997   /* In the case the last instruction is conditional jump to the next
998      instruction, first redirect the jump itself and then continue
999      by creating a basic block afterwards to redirect fallthru edge.  */
1000   if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1001       && any_condjump_p (BB_END (e->src))
1002       /* When called from cfglayout, fallthru edges do not
1003          necessarily go to the next block.  */
1004       && e->src->next_bb == e->dest
1005       && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1006     {
1007       rtx note;
1008       edge b = unchecked_make_edge (e->src, target, 0);
1009 
1010       if (!redirect_jump (BB_END (e->src), block_label (target), 0))
1011 	abort ();
1012       note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1013       if (note)
1014 	{
1015 	  int prob = INTVAL (XEXP (note, 0));
1016 
1017 	  b->probability = prob;
1018 	  b->count = e->count * prob / REG_BR_PROB_BASE;
1019 	  e->probability -= e->probability;
1020 	  e->count -= b->count;
1021 	  if (e->probability < 0)
1022 	    e->probability = 0;
1023 	  if (e->count < 0)
1024 	    e->count = 0;
1025 	}
1026     }
1027 
1028   if (e->flags & EDGE_ABNORMAL)
1029     {
1030       /* Irritating special case - fallthru edge to the same block as abnormal
1031 	 edge.
1032 	 We can't redirect abnormal edge, but we still can split the fallthru
1033 	 one and create separate abnormal edge to original destination.
1034 	 This allows bb-reorder to make such edge non-fallthru.  */
1035       if (e->dest != target)
1036 	abort ();
1037       abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1038       e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1039     }
1040   else if (!(e->flags & EDGE_FALLTHRU))
1041     abort ();
1042   else if (e->src == ENTRY_BLOCK_PTR)
1043     {
1044       /* We can't redirect the entry block.  Create an empty block at the
1045          start of the function which we use to add the new jump.  */
1046       edge *pe1;
1047       basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1048 
1049       /* Change the existing edge's source to be the new block, and add
1050 	 a new edge from the entry block to the new block.  */
1051       e->src = bb;
1052       for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
1053 	if (*pe1 == e)
1054 	  {
1055 	    *pe1 = e->succ_next;
1056 	    break;
1057 	  }
1058       e->succ_next = 0;
1059       bb->succ = e;
1060       make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1061     }
1062 
1063   if (e->src->succ->succ_next || abnormal_edge_flags)
1064     {
1065       /* Create the new structures.  */
1066 
1067       /* If the old block ended with a tablejump, skip its table
1068 	 by searching forward from there.  Otherwise start searching
1069 	 forward from the last instruction of the old block.  */
1070       if (!tablejump_p (BB_END (e->src), NULL, &note))
1071 	note = BB_END (e->src);
1072 
1073       /* Position the new block correctly relative to loop notes.  */
1074       note = last_loop_beg_note (note);
1075       note = NEXT_INSN (note);
1076 
1077       jump_block = create_basic_block (note, NULL, e->src);
1078       jump_block->count = e->count;
1079       jump_block->frequency = EDGE_FREQUENCY (e);
1080       jump_block->loop_depth = target->loop_depth;
1081 
1082       if (target->global_live_at_start)
1083 	{
1084 	  jump_block->global_live_at_start
1085 	    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1086 	  jump_block->global_live_at_end
1087 	    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1088 	  COPY_REG_SET (jump_block->global_live_at_start,
1089 			target->global_live_at_start);
1090 	  COPY_REG_SET (jump_block->global_live_at_end,
1091 			target->global_live_at_start);
1092 	}
1093 
1094       /* Wire edge in.  */
1095       new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1096       new_edge->probability = e->probability;
1097       new_edge->count = e->count;
1098 
1099       /* Redirect old edge.  */
1100       redirect_edge_pred (e, jump_block);
1101       e->probability = REG_BR_PROB_BASE;
1102 
1103       new_bb = jump_block;
1104     }
1105   else
1106     jump_block = e->src;
1107 
1108   e->flags &= ~EDGE_FALLTHRU;
1109   if (target == EXIT_BLOCK_PTR)
1110     {
1111       if (HAVE_return)
1112 	emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
1113       else
1114 	abort ();
1115     }
1116   else
1117     {
1118       rtx label = block_label (target);
1119       emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
1120       JUMP_LABEL (BB_END (jump_block)) = label;
1121       LABEL_NUSES (label)++;
1122     }
1123 
1124   emit_barrier_after (BB_END (jump_block));
1125   redirect_edge_succ_nodup (e, target);
1126 
1127   if (abnormal_edge_flags)
1128     make_edge (src, target, abnormal_edge_flags);
1129 
1130   return new_bb;
1131 }
1132 
1133 /* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
1134    (and possibly create new basic block) to make edge non-fallthru.
1135    Return newly created BB or NULL if none.  */
1136 
1137 basic_block
force_nonfallthru(edge e)1138 force_nonfallthru (edge e)
1139 {
1140   return force_nonfallthru_and_redirect (e, e->dest);
1141 }
1142 
1143 /* Redirect edge even at the expense of creating new jump insn or
1144    basic block.  Return new basic block if created, NULL otherwise.
1145    Abort if conversion is impossible.  */
1146 
1147 static basic_block
rtl_redirect_edge_and_branch_force(edge e,basic_block target)1148 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1149 {
1150   if (redirect_edge_and_branch (e, target)
1151       || e->dest == target)
1152     return NULL;
1153 
1154   /* In case the edge redirection failed, try to force it to be non-fallthru
1155      and redirect newly created simplejump.  */
1156   return force_nonfallthru_and_redirect (e, target);
1157 }
1158 
1159 /* The given edge should potentially be a fallthru edge.  If that is in
1160    fact true, delete the jump and barriers that are in the way.  */
1161 
void
tidy_fallthru_edge (edge e, basic_block b, basic_block c)
{
  rtx q;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  /* A real insn in the gap means B does not actually fall into C;
     leave everything alone.  */
  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  /* Only delete the jump when it is the sole effect of the insn: either
     an unconditional jump, or a conditional jump whose block has no
     other successor edge than E.  */
  if (GET_CODE (q) == JUMP_INSN
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || (b->succ == e && e->succ_next == NULL)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Back up over the jump itself, so it gets deleted below.  */
      q = PREV_INSN (q);

      /* We don't want a block to end on a line-number note since that has
	 the potential of changing the code between -g and not -g.  */
      while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
	q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  /* Delete everything strictly between Q and the head of C, if there
     is anything left to delete.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));

  e->flags |= EDGE_FALLTHRU;
}
1211 
1212 /* Fix up edges that now fall through, or rather should now fall through
1213    but previously required a jump around now deleted blocks.  Simplify
1214    the search by only examining blocks numerically adjacent, since this
1215    is how find_basic_blocks created them.  */
1216 
1217 void
tidy_fallthru_edges(void)1218 tidy_fallthru_edges (void)
1219 {
1220   basic_block b, c;
1221 
1222   if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1223     return;
1224 
1225   FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1226     {
1227       edge s;
1228 
1229       c = b->next_bb;
1230 
1231       /* We care about simple conditional or unconditional jumps with
1232 	 a single successor.
1233 
1234 	 If we had a conditional branch to the next instruction when
1235 	 find_basic_blocks was called, then there will only be one
1236 	 out edge for the block which ended with the conditional
1237 	 branch (since we do not create duplicate edges).
1238 
1239 	 Furthermore, the edge will be marked as a fallthru because we
1240 	 merge the flags for the duplicate edges.  So we do not want to
1241 	 check that the edge is not a FALLTHRU edge.  */
1242 
1243       if ((s = b->succ) != NULL
1244 	  && ! (s->flags & EDGE_COMPLEX)
1245 	  && s->succ_next == NULL
1246 	  && s->dest == c
1247 	  /* If the jump insn has side effects, we can't tidy the edge.  */
1248 	  && (GET_CODE (BB_END (b)) != JUMP_INSN
1249 	      || onlyjump_p (BB_END (b))))
1250 	tidy_fallthru_edge (s, b, c);
1251     }
1252 }
1253 
1254 /* Helper function for split_edge.  Return true in case edge BB2 to BB1
1255    is back edge of syntactic loop.  */
1256 
1257 static bool
back_edge_of_syntactic_loop_p(basic_block bb1,basic_block bb2)1258 back_edge_of_syntactic_loop_p (basic_block bb1, basic_block bb2)
1259 {
1260   rtx insn;
1261   int count = 0;
1262   basic_block bb;
1263 
1264   if (bb1 == bb2)
1265     return true;
1266 
1267   /* ??? Could we guarantee that bb indices are monotone, so that we could
1268      just compare them?  */
1269   for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
1270     continue;
1271 
1272   if (!bb)
1273     return false;
1274 
1275   for (insn = BB_END (bb1); insn != BB_HEAD (bb2) && count >= 0;
1276        insn = NEXT_INSN (insn))
1277     if (GET_CODE (insn) == NOTE)
1278       {
1279 	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1280 	  count++;
1281 	else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1282 	  count--;
1283       }
1284 
1285   return count >= 0;
1286 }
1287 
1288 /* Split a (typically critical) edge.  Return the new block.
1289    Abort on abnormal edges.
1290 
1291    ??? The code generally expects to be called on critical edges.
1292    The case of a block ending in an unconditional jump to a
1293    block with multiple predecessors is not handled optimally.  */
1294 
static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;

      /* Any fallthru predecessor of the destination would be cut off
	 by the new block; force it to use an explicit jump first.  */
      for (e = edge_in->dest->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

		        E
			|
			0
		       / \
		   +->1-->2--->E
                   |  |
		   +--+

      If we need to insert an insn on the edge from block 0 to block 1,
      we want to ensure the instructions we insert are outside of any
      loop notes that physically sit between block 0 and block 1.  Otherwise
      we confuse the loop optimizer into thinking the loop is a phony.  */

  /* Place the new block before a NOTE_INSN_LOOP_BEG that immediately
     precedes the destination — unless EDGE_IN is the back edge of that
     syntactic loop, in which case inserted code belongs inside.  */
  if (edge_in->dest != EXIT_BLOCK_PTR
      && PREV_INSN (BB_HEAD (edge_in->dest))
      && GET_CODE (PREV_INSN (BB_HEAD (edge_in->dest))) == NOTE
      && (NOTE_LINE_NUMBER (PREV_INSN (BB_HEAD (edge_in->dest)))
	  == NOTE_INSN_LOOP_BEG)
      && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
    before = PREV_INSN (BB_HEAD (edge_in->dest));
  else if (edge_in->dest != EXIT_BLOCK_PTR)
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* The new block inherits the edge's execution count and frequency.  */
  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
  bb->count = edge_in->count;
  bb->frequency = EDGE_FREQUENCY (edge_in);

  /* ??? This info is likely going to be out of date very soon.  */
  if (edge_in->dest->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (bb->global_live_at_start,
		    edge_in->dest->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end,
		    edge_in->dest->global_live_at_start);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      if (!redirect_edge_and_branch (edge_in, bb))
	abort ();
    }
  else
    redirect_edge_succ (edge_in, bb);

  return bb;
}
1378 
1379 /* Queue instructions for insertion on an edge between two basic blocks.
1380    The new instructions and basic blocks (if any) will not appear in the
1381    CFG until commit_edge_insertions is called.  */
1382 
1383 void
insert_insn_on_edge(rtx pattern,edge e)1384 insert_insn_on_edge (rtx pattern, edge e)
1385 {
1386   /* We cannot insert instructions on an abnormal critical edge.
1387      It will be easier to find the culprit if we die now.  */
1388   if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
1389     abort ();
1390 
1391   if (e->insns == NULL_RTX)
1392     start_sequence ();
1393   else
1394     push_to_sequence (e->insns);
1395 
1396   emit_insn (pattern);
1397 
1398   e->insns = get_insns ();
1399   end_sequence ();
1400 }
1401 
1402 /* Called from safe_insert_insn_on_edge through note_stores, marks live
1403    registers that are killed by the store.  */
1404 static void
mark_killed_regs(rtx reg,rtx set ATTRIBUTE_UNUSED,void * data)1405 mark_killed_regs (rtx reg, rtx set ATTRIBUTE_UNUSED, void *data)
1406 {
1407   regset killed = data;
1408   int regno, i;
1409 
1410   if (GET_CODE (reg) == SUBREG)
1411     reg = SUBREG_REG (reg);
1412   if (!REG_P (reg))
1413     return;
1414   regno = REGNO (reg);
1415   if (regno >= FIRST_PSEUDO_REGISTER)
1416     SET_REGNO_REG_SET (killed, regno);
1417   else
1418     {
1419       for (i = 0; i < (int) HARD_REGNO_NREGS (regno, GET_MODE (reg)); i++)
1420 	SET_REGNO_REG_SET (killed, regno + i);
1421     }
1422 }
1423 
1424 /* Similar to insert_insn_on_edge, tries to put INSN to edge E.  Additionally
1425    it checks whether this will not clobber the registers that are live on the
1426    edge (i.e. it requires liveness information to be up-to-date) and if there
1427    are some, then it tries to save and restore them.  Returns true if
1428    successful.  */
1429 bool
safe_insert_insn_on_edge(rtx insn,edge e)1430 safe_insert_insn_on_edge (rtx insn, edge e)
1431 {
1432   rtx x;
1433   regset_head killed_head;
1434   regset killed = INITIALIZE_REG_SET (killed_head);
1435   rtx save_regs = NULL_RTX;
1436   int regno, noccmode;
1437   enum machine_mode mode;
1438 
1439 #ifdef AVOID_CCMODE_COPIES
1440   noccmode = true;
1441 #else
1442   noccmode = false;
1443 #endif
1444 
1445   for (x = insn; x; x = NEXT_INSN (x))
1446     if (INSN_P (x))
1447       note_stores (PATTERN (x), mark_killed_regs, killed);
1448   bitmap_operation (killed, killed, e->dest->global_live_at_start,
1449 		    BITMAP_AND);
1450 
1451   EXECUTE_IF_SET_IN_REG_SET (killed, 0, regno,
1452     {
1453       mode = regno < FIRST_PSEUDO_REGISTER
1454 	      ? reg_raw_mode[regno]
1455 	      : GET_MODE (regno_reg_rtx[regno]);
1456       if (mode == VOIDmode)
1457 	return false;
1458 
1459       if (noccmode && mode == CCmode)
1460 	return false;
1461 
1462       save_regs = alloc_EXPR_LIST (0,
1463 				   alloc_EXPR_LIST (0,
1464 						    gen_reg_rtx (mode),
1465 						    gen_raw_REG (mode, regno)),
1466 				   save_regs);
1467     });
1468 
1469   if (save_regs)
1470     {
1471       rtx from, to;
1472 
1473       start_sequence ();
1474       for (x = save_regs; x; x = XEXP (x, 1))
1475 	{
1476 	  from = XEXP (XEXP (x, 0), 1);
1477 	  to = XEXP (XEXP (x, 0), 0);
1478 	  emit_move_insn (to, from);
1479 	}
1480       emit_insn (insn);
1481       for (x = save_regs; x; x = XEXP (x, 1))
1482 	{
1483 	  from = XEXP (XEXP (x, 0), 0);
1484 	  to = XEXP (XEXP (x, 0), 1);
1485 	  emit_move_insn (to, from);
1486 	}
1487       insn = get_insns ();
1488       end_sequence ();
1489       free_EXPR_LIST_list (&save_regs);
1490     }
1491   insert_insn_on_edge (insn, e);
1492 
1493   FREE_REG_SET (killed);
1494   return true;
1495 }
1496 
1497 /* Update the CFG for the instructions queued on edge E.  */
1498 
/* WATCH_CALLS nonzero means take care not to separate a call from the
   insns that store its return value (see the special case below).  */
static void
commit_one_edge_insertion (edge e, int watch_calls)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb = NULL;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Special case -- avoid inserting code between call and storing
     its return value.  */
  if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
      && e->src != ENTRY_BLOCK_PTR
      && GET_CODE (BB_END (e->src)) == CALL_INSN)
    {
      rtx next = next_nonnote_insn (BB_END (e->src));

      after = BB_HEAD (e->dest);
      /* The first insn after the call may be a stack pop, skip it.  */
      while (next
	     && keep_with_call_p (next))
	{
	  after = next;
	  next = next_nonnote_insn (next);
	}
      bb = e->dest;
    }
  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
         one predecessor, insert there.  Except for the exit block.  */
      if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
	{
	  bb = e->dest;

	  /* Get the location correct wrt a code label, and "nice" wrt
	     a basic block note, and before everything else.  */
	  tmp = BB_HEAD (bb);
	  if (GET_CODE (tmp) == CODE_LABEL)
	    tmp = NEXT_INSN (tmp);
	  if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  /* TMP now points just past the label/bb-note prologue of BB;
	     insert before it (or after its predecessor).  */
	  if (tmp == BB_HEAD (bb))
	    before = tmp;
	  else if (tmp)
	    after = PREV_INSN (tmp);
	  else
	    after = get_last_insn ();
	}

      /* If the source has one successor and the edge is not abnormal,
         insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
	       && e->src->succ->succ_next == NULL
	       && e->src != ENTRY_BLOCK_PTR)
	{
	  bb = e->src;

	  /* It is possible to have a non-simple jump here.  Consider a target
	     where some forms of unconditional jumps clobber a register.  This
	     happens on the fr30 for example.

	     We know this block has a single successor, so we can just emit
	     the queued insns before the jump.  */
	  if (GET_CODE (BB_END (bb)) == JUMP_INSN)
	    /* Back up over any NOTE_INSN_LOOP_BEG notes immediately
	       preceding the jump, so the insns land outside the loop
	       notes.  */
	    for (before = BB_END (bb);
		 GET_CODE (PREV_INSN (before)) == NOTE
		 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
		 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
	      ;
	  else
	    {
	      /* We'd better be fallthru, or we've lost track of what's what.  */
	      if ((e->flags & EDGE_FALLTHRU) == 0)
		abort ();

	      after = BB_END (bb);
	    }
	}
      /* Otherwise we must split the edge.  */
      else
	{
	  bb = split_edge (e);
	  after = BB_END (bb);
	}
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before_noloc (insns, before);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
         This is not currently a problem because this only happens
         for the (single) epilogue, which already has a fallthru edge
         to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();

  /* Mark the basic block for find_sub_basic_blocks.  */
  bb->aux = &bb->aux;
}
1621 
1622 /* Update the CFG for all queued instructions.  */
1623 
1624 void
commit_edge_insertions(void)1625 commit_edge_insertions (void)
1626 {
1627   basic_block bb;
1628   sbitmap blocks;
1629   bool changed = false;
1630 
1631 #ifdef ENABLE_CHECKING
1632   verify_flow_info ();
1633 #endif
1634 
1635   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1636     {
1637       edge e, next;
1638 
1639       for (e = bb->succ; e; e = next)
1640 	{
1641 	  next = e->succ_next;
1642 	  if (e->insns)
1643 	    {
1644 	       changed = true;
1645 	       commit_one_edge_insertion (e, false);
1646 	    }
1647 	}
1648     }
1649 
1650   if (!changed)
1651     return;
1652 
1653   blocks = sbitmap_alloc (last_basic_block);
1654   sbitmap_zero (blocks);
1655   FOR_EACH_BB (bb)
1656     if (bb->aux)
1657       {
1658         SET_BIT (blocks, bb->index);
1659 	/* Check for forgotten bb->aux values before commit_edge_insertions
1660 	   call.  */
1661 	if (bb->aux != &bb->aux)
1662 	  abort ();
1663 	bb->aux = NULL;
1664       }
1665   find_many_sub_basic_blocks (blocks);
1666   sbitmap_free (blocks);
1667 }
1668 
1669 /* Update the CFG for all queued instructions, taking special care of inserting
1670    code on edges between call and storing its return value.  */
1671 
1672 void
commit_edge_insertions_watch_calls(void)1673 commit_edge_insertions_watch_calls (void)
1674 {
1675   basic_block bb;
1676   sbitmap blocks;
1677   bool changed = false;
1678 
1679 #ifdef ENABLE_CHECKING
1680   verify_flow_info ();
1681 #endif
1682 
1683   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1684     {
1685       edge e, next;
1686 
1687       for (e = bb->succ; e; e = next)
1688 	{
1689 	  next = e->succ_next;
1690 	  if (e->insns)
1691 	    {
1692 	      changed = true;
1693 	      commit_one_edge_insertion (e, true);
1694 	    }
1695 	}
1696     }
1697 
1698   if (!changed)
1699     return;
1700 
1701   blocks = sbitmap_alloc (last_basic_block);
1702   sbitmap_zero (blocks);
1703   FOR_EACH_BB (bb)
1704     if (bb->aux)
1705       {
1706         SET_BIT (blocks, bb->index);
1707 	/* Check for forgotten bb->aux values before commit_edge_insertions
1708 	   call.  */
1709 	if (bb->aux != &bb->aux)
1710 	  abort ();
1711 	bb->aux = NULL;
1712       }
1713   find_many_sub_basic_blocks (blocks);
1714   sbitmap_free (blocks);
1715 }
1716 
1717 /* Print out one basic block with live information at start and end.  */
1718 
1719 static void
rtl_dump_bb(basic_block bb,FILE * outf)1720 rtl_dump_bb (basic_block bb, FILE *outf)
1721 {
1722   rtx insn;
1723   rtx last;
1724 
1725   fputs (";; Registers live at start:", outf);
1726   dump_regset (bb->global_live_at_start, outf);
1727   putc ('\n', outf);
1728 
1729   for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1730        insn = NEXT_INSN (insn))
1731     print_rtl_single (outf, insn);
1732 
1733   fputs (";; Registers live at end:", outf);
1734   dump_regset (bb->global_live_at_end, outf);
1735   putc ('\n', outf);
1736 }
1737 
1738 /* Like print_rtl, but also print out live information for the start of each
1739    basic block.  */
1740 
void
print_rtl_with_bb (FILE *outf, rtx rtx_first)
{
  rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Indexed by INSN_UID: the block an insn starts or ends, if any.  */
      basic_block *start = xcalloc (max_uid, sizeof (basic_block));
      basic_block *end = xcalloc (max_uid, sizeof (basic_block));
      /* Indexed by INSN_UID: whether the insn was seen in zero, one, or
	 several blocks during the scan below.  */
      enum bb_state *in_bb_p = xcalloc (max_uid, sizeof (enum bb_state));

      basic_block bb;

      FOR_EACH_BB_REVERSE (bb)
	{
	  rtx x;

	  start[INSN_UID (BB_HEAD (bb))] = bb;
	  end[INSN_UID (BB_END (bb))] = bb;
	  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;

	      /* First sighting takes NOT_IN_BB to IN_ONE_BB; a repeat
		 sighting marks the insn as shared between blocks.  */
	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == BB_END (bb))
		break;
	    }
	}

      /* Walk the insn stream, printing block-boundary annotations
	 around each insn.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;

	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb->index);
	      dump_regset (bb->global_live_at_start, outf);
	      putc ('\n', outf);
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && GET_CODE (tmp_rtx) != NOTE
	      && GET_CODE (tmp_rtx) != BARRIER)
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; End of basic block %d, registers live:\n",
		       bb->index);
	      dump_regset (bb->global_live_at_end, outf);
	      putc ('\n', outf);
	    }

	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
1823 
1824 void
update_br_prob_note(basic_block bb)1825 update_br_prob_note (basic_block bb)
1826 {
1827   rtx note;
1828   if (GET_CODE (BB_END (bb)) != JUMP_INSN)
1829     return;
1830   note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1831   if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1832     return;
1833   XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1834 }
1835 
1836 /* Verify the CFG and RTL consistency common for both underlying RTL and
1837    cfglayout RTL.
1838 
1839    Currently it does following checks:
1840 
1841    - test head/end pointers
1842    - overlapping of basic blocks
1843    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1844    - tails of basic blocks (ensure that boundary is necessary)
1845    - scans body of the basic block for JUMP_INSN, CODE_LABEL
1846      and NOTE_INSN_BASIC_BLOCK
1847 
1848    In future it can be extended check a lot of other stuff as well
1849    (reachability of basic blocks, life information, etc. etc.).  */
1850 static int
rtl_verify_flow_info_1(void)1851 rtl_verify_flow_info_1 (void)
1852 {
1853   const int max_uid = get_max_uid ();
1854   rtx last_head = get_last_insn ();
1855   basic_block *bb_info;
1856   rtx x;
1857   int err = 0;
1858   basic_block bb, last_bb_seen;
1859 
1860   bb_info = xcalloc (max_uid, sizeof (basic_block));
1861 
1862   /* Check bb chain & numbers.  */
1863   last_bb_seen = ENTRY_BLOCK_PTR;
1864 
1865   FOR_EACH_BB_REVERSE (bb)
1866     {
1867       rtx head = BB_HEAD (bb);
1868       rtx end = BB_END (bb);
1869 
1870       /* Verify the end of the basic block is in the INSN chain.  */
1871       for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1872 	if (x == end)
1873 	  break;
1874 
1875       if (!x)
1876 	{
1877 	  error ("end insn %d for block %d not found in the insn stream",
1878 		 INSN_UID (end), bb->index);
1879 	  err = 1;
1880 	}
1881 
1882       /* Work backwards from the end to the head of the basic block
1883 	 to verify the head is in the RTL chain.  */
1884       for (; x != NULL_RTX; x = PREV_INSN (x))
1885 	{
1886 	  /* While walking over the insn chain, verify insns appear
1887 	     in only one basic block and initialize the BB_INFO array
1888 	     used by other passes.  */
1889 	  if (bb_info[INSN_UID (x)] != NULL)
1890 	    {
1891 	      error ("insn %d is in multiple basic blocks (%d and %d)",
1892 		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1893 	      err = 1;
1894 	    }
1895 
1896 	  bb_info[INSN_UID (x)] = bb;
1897 
1898 	  if (x == head)
1899 	    break;
1900 	}
1901       if (!x)
1902 	{
1903 	  error ("head insn %d for block %d not found in the insn stream",
1904 		 INSN_UID (head), bb->index);
1905 	  err = 1;
1906 	}
1907 
1908       last_head = x;
1909     }
1910 
1911   /* Now check the basic blocks (boundaries etc.) */
1912   FOR_EACH_BB_REVERSE (bb)
1913     {
1914       int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1915       edge e, fallthru = NULL;
1916       rtx note;
1917 
1918       if (INSN_P (BB_END (bb))
1919 	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1920 	  && bb->succ && bb->succ->succ_next
1921 	  && any_condjump_p (BB_END (bb)))
1922 	{
1923 	  if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1924 	    {
1925 	      error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1926 		     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1927 	      err = 1;
1928 	    }
1929 	}
1930       for (e = bb->succ; e; e = e->succ_next)
1931 	{
1932 	  if (e->flags & EDGE_FALLTHRU)
1933 	    n_fallthru++, fallthru = e;
1934 
1935 	  if ((e->flags & ~(EDGE_DFS_BACK
1936 			    | EDGE_CAN_FALLTHRU
1937 			    | EDGE_IRREDUCIBLE_LOOP
1938 			    | EDGE_LOOP_EXIT)) == 0)
1939 	    n_branch++;
1940 
1941 	  if (e->flags & EDGE_ABNORMAL_CALL)
1942 	    n_call++;
1943 
1944 	  if (e->flags & EDGE_EH)
1945 	    n_eh++;
1946 	  else if (e->flags & EDGE_ABNORMAL)
1947 	    n_abnormal++;
1948 	}
1949 
1950       if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
1951 	  && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1952 	{
1953 	  error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
1954 	  err = 1;
1955 	}
1956       if (n_branch
1957 	  && (GET_CODE (BB_END (bb)) != JUMP_INSN
1958 	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1959 				   || any_condjump_p (BB_END (bb))))))
1960 	{
1961 	  error ("Too many outgoing branch edges from bb %i", bb->index);
1962 	  err = 1;
1963 	}
1964       if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1965 	{
1966 	  error ("Fallthru edge after unconditional jump %i", bb->index);
1967 	  err = 1;
1968 	}
1969       if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1970 	{
1971 	  error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
1972 	  err = 1;
1973 	}
1974       if (n_branch != 1 && any_condjump_p (BB_END (bb))
1975 	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1976 	{
1977 	  error ("Wrong amount of branch edges after conditional jump %i", bb->index);
1978 	  err = 1;
1979 	}
1980       if (n_call && GET_CODE (BB_END (bb)) != CALL_INSN)
1981 	{
1982 	  error ("Call edges for non-call insn in bb %i", bb->index);
1983 	  err = 1;
1984 	}
1985       if (n_abnormal
1986 	  && (GET_CODE (BB_END (bb)) != CALL_INSN && n_call != n_abnormal)
1987 	  && (GET_CODE (BB_END (bb)) != JUMP_INSN
1988 	      || any_condjump_p (BB_END (bb))
1989 	      || any_uncondjump_p (BB_END (bb))))
1990 	{
1991 	  error ("Abnormal edges for no purpose in bb %i", bb->index);
1992 	  err = 1;
1993 	}
1994 
1995       for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
1996 	if (BLOCK_FOR_INSN (x) != bb)
1997 	  {
1998 	    debug_rtx (x);
1999 	    if (! BLOCK_FOR_INSN (x))
2000 	      error
2001 		("insn %d inside basic block %d but block_for_insn is NULL",
2002 		 INSN_UID (x), bb->index);
2003 	    else
2004 	      error
2005 		("insn %d inside basic block %d but block_for_insn is %i",
2006 		 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2007 
2008 	    err = 1;
2009 	  }
2010 
2011       /* OK pointers are correct.  Now check the header of basic
2012          block.  It ought to contain optional CODE_LABEL followed
2013 	 by NOTE_BASIC_BLOCK.  */
2014       x = BB_HEAD (bb);
2015       if (GET_CODE (x) == CODE_LABEL)
2016 	{
2017 	  if (BB_END (bb) == x)
2018 	    {
2019 	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2020 		     bb->index);
2021 	      err = 1;
2022 	    }
2023 
2024 	  x = NEXT_INSN (x);
2025 	}
2026 
2027       if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2028 	{
2029 	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2030 		 bb->index);
2031 	  err = 1;
2032 	}
2033 
2034       if (BB_END (bb) == x)
2035 	/* Do checks for empty blocks her. e */
2036 	;
2037       else
2038 	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2039 	  {
2040 	    if (NOTE_INSN_BASIC_BLOCK_P (x))
2041 	      {
2042 		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2043 		       INSN_UID (x), bb->index);
2044 		err = 1;
2045 	      }
2046 
2047 	    if (x == BB_END (bb))
2048 	      break;
2049 
2050 	    if (control_flow_insn_p (x))
2051 	      {
2052 		error ("in basic block %d:", bb->index);
2053 		fatal_insn ("flow control insn inside a basic block", x);
2054 	      }
2055 	  }
2056     }
2057 
2058   /* Clean up.  */
2059   free (bb_info);
2060   return err;
2061 }
2062 
2063 /* Verify the CFG and RTL consistency common for both underlying RTL and
2064    cfglayout RTL.
2065 
2066    Currently it does following checks:
2067    - all checks of rtl_verify_flow_info_1
2068    - check that all insns are in the basic blocks
2069      (except the switch handling code, barriers and notes)
2070    - check that all returns are followed by barriers
2071    - check that all fallthru edge points to the adjacent blocks.  */
static int
rtl_verify_flow_info (void)
{
  basic_block bb;
  /* Run the representation-independent checks first; accumulate into ERR.  */
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;

  /* Check fallthru edges: a fallthru must go to the immediately following
     block, and a block with no fallthru successor must be followed by a
     barrier.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (!e)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = BB_END (bb); !insn || GET_CODE (insn) != BARRIER;
	       insn = NEXT_INSN (insn))
	    if (!insn
		|| (GET_CODE (insn) == NOTE
		    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	}
      else if (e->src != ENTRY_BLOCK_PTR
	       && e->dest != EXIT_BLOCK_PTR)
        {
	  rtx insn;

	  /* The fallthru destination must be laid out right after the
	     source in the insn stream.  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    /* Only notes (and, with CASE_DROPS_THROUGH, jump table data)
	       may appear between a block and its fallthru successor.  */
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (GET_CODE (insn) == BARRIER
#ifndef CASE_DROPS_THROUGH
		  || INSN_P (insn)
#else
		  || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
#endif
		  )
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
        }
    }

  /* Walk the whole insn chain: every insn must lie inside some basic
     block (with a few exceptions), and blocks must appear in the same
     order as in the block chain.  */
  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      /* CURR_BB is NULL between blocks; only a limited set of insn
	 codes may appear there.  */
      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any block block.  */
	      if (NEXT_INSN (x)
		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      /* An unconditional return must be immediately followed by a
	 barrier in the insn stream.  */
      if (INSN_P (x)
	  && GET_CODE (x) == JUMP_INSN
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
	    fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

   return err;
}
2191 
2192 /* Assume that the preceding pass has possibly eliminated jump instructions
2193    or converted the unconditional jumps.  Eliminate the edges from CFG.
2194    Return true if any edges are eliminated.  */
2195 
bool
purge_dead_edges (basic_block bb)
{
  edge e, next;
  /* INSN is the last insn of the block; most decisions key off it.  */
  rtx insn = BB_END (bb), note;
  bool purged = false;

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (GET_CODE (insn) == INSN
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (e->flags & EDGE_EH)
	{
	  /* Keep EH edges only while the last insn can still throw.  */
	  if (can_throw_internal (BB_END (bb)))
	    continue;
	}
      else if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  /* Abnormal call edges survive only for calls, and only when
	     there is no REG_EH_REGION note with a negative region
	     number (a negative number marks a nothrow region).  */
	  if (GET_CODE (BB_END (bb)) == CALL_INSN
	      && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
		  || INTVAL (XEXP (note, 0)) >= 0))
	    continue;
	}
      else
	continue;

      remove_edge (e);
      bb->flags |= BB_DIRTY;
      purged = true;
    }

  if (GET_CODE (insn) == JUMP_INSN)
    {
      rtx note;
      edge b,f;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      /* Drop every successor edge that the jump no longer justifies.  */
      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;

	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    continue;
	  else if (e->dest != EXIT_BLOCK_PTR
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    continue;
	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    continue;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      continue;
	    }

	  /* We do not need this edge.  */
	  bb->flags |= BB_DIRTY;
	  purged = true;
	  remove_edge (e);
	}

      if (!bb->succ || !purged)
	return purged;

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (!bb->succ->succ_next)
	{
	  /* A single remaining successor gets everything.  */
	  bb->succ->probability = REG_BR_PROB_BASE;
	  bb->succ->count = bb->count;
	}
      else
	{
	  /* Two successors: split according to the REG_BR_PROB note,
	     if any.  */
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = INTVAL (XEXP (note, 0));
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (GET_CODE (insn) == CALL_INSN && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      if (!bb->succ || bb->succ->succ_next)
	abort ();
      if (bb->succ->flags != (EDGE_SIBCALL | EDGE_ABNORMAL))
	abort ();

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
       e = e->succ_next)
    ;

  if (!e)
    return purged;

  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (!(e->flags & EDGE_FALLTHRU))
	{
	  bb->flags |= BB_DIRTY;
	  remove_edge (e);
	  purged = true;
	}
    }

  /* Exactly the fallthru edge must remain at this point.  */
  if (!bb->succ || bb->succ->succ_next)
    abort ();

  bb->succ->probability = REG_BR_PROB_BASE;
  bb->succ->count = bb->count;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
2378 
2379 /* Search all basic blocks for potentially dead edges and purge them.  Return
2380    true if some edge has been eliminated.  */
2381 
2382 bool
purge_all_dead_edges(int update_life_p)2383 purge_all_dead_edges (int update_life_p)
2384 {
2385   int purged = false;
2386   sbitmap blocks = 0;
2387   basic_block bb;
2388 
2389   if (update_life_p)
2390     {
2391       blocks = sbitmap_alloc (last_basic_block);
2392       sbitmap_zero (blocks);
2393     }
2394 
2395   FOR_EACH_BB (bb)
2396     {
2397       bool purged_here = purge_dead_edges (bb);
2398 
2399       purged |= purged_here;
2400       if (purged_here && update_life_p)
2401 	SET_BIT (blocks, bb->index);
2402     }
2403 
2404   if (update_life_p && purged)
2405     update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2406 		      PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2407 		      | PROP_KILL_DEAD_CODE);
2408 
2409   if (update_life_p)
2410     sbitmap_free (blocks);
2411   return purged;
2412 }
2413 
2414 /* Same as split_block but update cfg_layout structures.  */
2415 static edge
cfg_layout_split_block(basic_block bb,void * insnp)2416 cfg_layout_split_block (basic_block bb, void *insnp)
2417 {
2418   rtx insn = insnp;
2419 
2420   edge fallthru = rtl_split_block (bb, insn);
2421 
2422   fallthru->dest->rbi->footer = fallthru->src->rbi->footer;
2423   fallthru->src->rbi->footer = NULL;
2424   return fallthru;
2425 }
2426 
2427 
/* Redirect edge E to DEST in cfglayout mode.  Returns true on success.
   EH and abnormal-call edges cannot be redirected.  */
static bool
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  bool ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  /* Already going to DEST — nothing to do.  */
  if (e->dest == dest)
    return true;

  /* Try to simplify by replacing the jump entirely.  */
  if (e->src != ENTRY_BLOCK_PTR
      && try_redirect_by_replacing_jump (e, dest, true))
    return true;

  /* A simple fallthru edge out of the entry block can be redirected
     without touching any insns.  */
  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      redirect_edge_succ (e, dest);
      return true;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (GET_CODE (BB_END (src)) == JUMP_INSN
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  /* Temporarily clear FALLTHRU so redirect_branch_edge treats
	     this as a pure branch edge, then restore it.  */
	  e->flags &= ~EDGE_FALLTHRU;
	  if (!redirect_branch_edge (e, dest))
	    abort ();
	  e->flags |= EDGE_FALLTHRU;
	  return true;
	}
      /* In case we are redirecting fallthru edge to the branch edge
         of conditional jump, remove it.  */
      if (src->succ->succ_next
	  && !src->succ->succ_next->succ_next)
	{
	  /* S is the other (branch) successor of SRC.  */
	  edge s = e->succ_next ? e->succ_next : src->succ;
	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}

      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Fallthru edge %i->%i redirected to %i\n",
		 e->src->index, e->dest->index, dest->index);
      redirect_edge_succ_nodup (e, dest);

      ret = true;
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  if (simplejump_p (BB_END (src)))
    abort ();

  return ret;
}
2505 
2506 /* Simple wrapper as we always can redirect fallthru edges.  */
2507 static basic_block
cfg_layout_redirect_edge_and_branch_force(edge e,basic_block dest)2508 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2509 {
2510   if (!cfg_layout_redirect_edge_and_branch (e, dest))
2511     abort ();
2512   return NULL;
2513 }
2514 
/* Same as flow_delete_block but update cfg_layout structures: the
   block's out-of-line header is spliced back into the insn stream
   before deletion, and any leftover insns (footer, deleted labels)
   are re-attached to the next block's header (or the function
   footer).  */
static void
cfg_layout_delete_block (basic_block bb)
{
  /* PREV/NEXT bracket the region of the insn chain belonging to BB;
     TO receives whatever insns remain after deletion.  NOTE: the name
     `remaints' is a historical typo for `remains'.  */
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;

  /* Splice the out-of-line header insns back in front of BB_HEAD.  */
  if (bb->rbi->header)
    {
      next = BB_HEAD (bb);
      if (prev)
	NEXT_INSN (prev) = bb->rbi->header;
      else
	set_first_insn (bb->rbi->header);
      PREV_INSN (bb->rbi->header) = prev;
      insn = bb->rbi->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (bb->rbi->footer)
    {
      /* Drop barriers from the footer, stopping at the first label;
	 then splice what is left after BB_END.  */
      insn = bb->rbi->footer;
      while (insn)
	{
	  if (GET_CODE (insn) == BARRIER)
	    {
	      if (PREV_INSN (insn))
		NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		bb->rbi->footer = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  insn = NEXT_INSN (insn);
	}
      if (bb->rbi->footer)
	{
	  insn = BB_END (bb);
	  NEXT_INSN (insn) = bb->rbi->footer;
	  PREV_INSN (bb->rbi->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  NEXT_INSN (insn) = next;
	  if (next)
	    PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  /* Remaining insns become part of the next block's header, or of the
     function footer when BB was the last block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &bb->next_bb->rbi->header;
  else
    to = &cfg_layout_function_footer;
  rtl_delete_block (bb);

  /* Re-derive the boundaries of what survived the deletion.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      /* Unlink the leftover insns and prepend them to *TO.  */
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
	PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
2595 
2596 /* Return true when blocks A and B can be safely merged.  */
2597 static bool
cfg_layout_can_merge_blocks_p(basic_block a,basic_block b)2598 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2599 {
2600   /* There must be exactly one edge in between the blocks.  */
2601   return (a->succ && !a->succ->succ_next && a->succ->dest == b
2602 	  && !b->pred->pred_next && a != b
2603 	  /* Must be simple edge.  */
2604 	  && !(a->succ->flags & EDGE_COMPLEX)
2605 	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2606 	  /* If the jump insn has side effects,
2607 	     we can't kill the edge.  */
2608 	  && (GET_CODE (BB_END (a)) != JUMP_INSN
2609 	      || (reload_completed
2610 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2611 }
2612 
/* Merge block A and B, abort when it is not possible.  B's insns are
   appended to A (moving them in the insn stream if the blocks are not
   adjacent) and B's header/footer are folded into A's.  */
static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_merge_blocks_p (a, b))
    abort ();
#endif

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (GET_CODE (BB_HEAD (b)) == CODE_LABEL)
    delete_insn (BB_HEAD (b));

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (GET_CODE (BB_END (a)) == JUMP_INSN)
    try_redirect_by_replacing_jump (a->succ, b, true);
  if (GET_CODE (BB_END (a)) == JUMP_INSN)
    abort ();

  /* Possible line number notes should appear in between.  */
  if (b->rbi->header)
    {
      rtx first = BB_END (a), last;

      /* Emit B's header after A's end, then delete the (now copied)
	 originals from the chain.  */
      last = emit_insn_after_noloc (b->rbi->header, BB_END (a));
      delete_insn_chain (NEXT_INSN (first), last);
      b->rbi->header = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (first, BB_END (a));
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (first))
	first = NEXT_INSN (first);
      if (!NOTE_INSN_BASIC_BLOCK_P (first))
	abort ();
      BB_HEAD (b) = NULL;
      /* B's basic block note is no longer needed.  */
      delete_insn (first);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      rtx insn;

      /* Point every insn of B at block A.  */
      for (insn = BB_HEAD (b);
	   insn != NEXT_INSN (BB_END (b));
	   insn = NEXT_INSN (insn))
	set_block_for_insn (insn, a);
      insn = BB_HEAD (b);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);
      if (!NOTE_INSN_BASIC_BLOCK_P (insn))
	abort ();
      BB_HEAD (b) = NULL;
      BB_END (a) = BB_END (b);
      delete_insn (insn);
    }

  /* Possible tablejumps and barriers should appear after the block.  */
  if (b->rbi->footer)
    {
      if (!a->rbi->footer)
	a->rbi->footer = b->rbi->footer;
      else
	{
	  /* Append B's footer after A's.  */
	  rtx last = a->rbi->footer;

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = b->rbi->footer;
	  PREV_INSN (b->rbi->footer) = last;
	}
      b->rbi->footer = NULL;
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Merged blocks %d and %d.\n",
	     a->index, b->index);

  update_cfg_after_block_merging (a, b);
}
2700 
2701 /* Split edge E.  */
2702 static basic_block
cfg_layout_split_edge(edge e)2703 cfg_layout_split_edge (edge e)
2704 {
2705   edge new_e;
2706   basic_block new_bb =
2707     create_basic_block (e->src != ENTRY_BLOCK_PTR
2708 			? NEXT_INSN (BB_END (e->src)) : get_insns (),
2709 			NULL_RTX, e->src);
2710 
2711   new_bb->count = e->count;
2712   new_bb->frequency = EDGE_FREQUENCY (e);
2713 
2714   /* ??? This info is likely going to be out of date very soon, but we must
2715      create it to avoid getting an ICE later.  */
2716   if (e->dest->global_live_at_start)
2717     {
2718       new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2719       new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2720       COPY_REG_SET (new_bb->global_live_at_start,
2721 		    e->dest->global_live_at_start);
2722       COPY_REG_SET (new_bb->global_live_at_end,
2723 		    e->dest->global_live_at_start);
2724     }
2725 
2726   new_e = make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2727   new_e->probability = REG_BR_PROB_BASE;
2728   new_e->count = e->count;
2729   redirect_edge_and_branch_force (e, new_bb);
2730 
2731   return new_bb;
2732 }
2733 
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  rtl_verify_flow_info,			/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_create_basic_block,		/* create_basic_block */
  rtl_redirect_edge_and_branch,		/* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force */
  rtl_delete_block,			/* delete_basic_block */
  rtl_split_block,			/* split_block */
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks,			/* merge_blocks */
  rtl_split_edge			/* split_edge */
};
2747 
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.  */
struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  /* In cfglayout mode fallthru edges need not be to adjacent blocks,
     so only the representation-independent checks apply.  */
  rtl_verify_flow_info_1,		/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  cfg_layout_create_basic_block,	/* create_basic_block */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  cfg_layout_delete_block,		/* delete_basic_block */
  cfg_layout_split_block,		/* split_block */
  cfg_layout_can_merge_blocks_p,	/* can_merge_blocks_p */
  cfg_layout_merge_blocks,		/* merge_blocks */
  cfg_layout_split_edge			/* split_edge */
};
2764