/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low level functions to manipulate and analyze the
   CFG that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	fixup_abnormal_edges

   Functions not intended for generic use:
     - Infrastructure to quickly determine the basic block for an insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of the insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
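
/* A minimal usage sketch (an illustration, not part of the API docs
   above): new code can be queued on an edge E and materialized later,
   with edges split as needed so each queued sequence runs exactly when
   control passes along its edge:

     insert_insn_on_edge (gen_nop (), e);
     commit_edge_insertions ();  */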

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "bb-reorder.h"
#include "rtl-error.h"
#include "insn-attr.h"
#include "dojump.h"
#include "expr.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "print-rtl.h"

/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx_insn *cfg_layout_function_footer;
static GTY(()) rtx_insn *cfg_layout_function_header;

static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static int can_delete_note_p (const rtx_note *);
static int can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const rtx_note *note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !vec_safe_contains<rtx_insn *> (forced_labels,
					     const_cast<rtx_code_label *> (label)));
}

/* Delete INSN by patching it out.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be referenced via variables, the constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

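  /* For a jump table, drop the use count of each label it lists; labels
     already turned into deleted-label notes are skipped below.  */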
  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}

/* Like delete_insn but also purge dead edges from BB.
   Return true if any edges are eliminated.  */

bool
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

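  /* Only the removal of a block's last insn can change the block's
     outgoing edges, so note whether INSN ends its block before deleting
     it.  */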
  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    purge = true;
  delete_insn (insn);
  if (purge)
    return purge_dead_edges (BLOCK_FOR_INSN (insn));
  return false;
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, set the bb field of any
   insn that cannot be removed to NULL.  */

void
delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
{
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
  rtx_insn *current = finish;
  while (1)
    {
      rtx_insn *prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
	;
      else
	delete_insn (current);

      if (clear_bb && !current->deleted ())
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}

/* Create a new basic block consisting of the instructions between HEAD and
   END inclusive.  This function is designed to allow fast BB construction -
   it reuses the note and basic block struct in BB_NOTE, if any, does not
   grow the BASIC_BLOCK chain, and should be used directly only by CFG
   construction code.  END can be NULL to create a new empty basic block
   before HEAD.  Both END and HEAD can be NULL to create a basic block at
   the end of the INSN chain.  AFTER is the basic block the new block
   should be placed after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

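      /* Move BB_NOTE to just after AFTER, unless it is already there.  */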
      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

/* Create a new basic block consisting of the instructions between HEAD
   and END and place it in the BB chain after block AFTER.  END can be
   NULL to create a new empty basic block before HEAD.  Both END and HEAD
   can be NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    {
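      /* Grow by about 25% so that repeated growth is amortized.  */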
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);


  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}

/* Return the RTX to emit after when we want to emit code at the entry of
   the function.  */
rtx_insn *
entry_of_function (void)
{
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
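  /* The entry block's only successor is the fallthru edge into the
     function body; queue INSN on it and commit immediately.  */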
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}


/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
	return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

  return true;
}

/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

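  /* If SRC already has a fallthru edge to the exit block, it cannot
     also fall through to TARGET.  */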
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}

/* Return nonzero if we could reach target from src by falling through,
   if the target was made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
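  /* An existing fallthru edge to the exit block rules this out.  */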
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}

/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Create a new basic block just after basic block BB by splitting
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check the type of the insn so that we do not create
     an inconsistent cfg.  It is checked in verify_flow_info anyway, so do
     not bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}

/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

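  /* Skip any debug insns at the end of B, so that B_END is the last
     nondebug insn and B_DEBUG_START..B_DEBUG_END spans the trailing
     debug insns.  */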
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && only_sets_cc0_p (prev))
	{
	  rtx_insn *tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}


/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */

rtx_code_label *
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  if (!LABEL_P (BB_HEAD (block)))
    {
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
    }

  return as_a <rtx_code_label *> (BB_HEAD (block));
}

/* Remove all barriers from BB_FOOTER of a BB.  */

static void
remove_barriers_from_footer (basic_block bb)
{
  rtx_insn *insn = BB_FOOTER (bb);

  /* Remove barriers but keep jumptables.  */
  while (insn)
    {
      if (BARRIER_P (insn))
	{
	  if (PREV_INSN (insn))
	    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	  else
	    BB_FOOTER (bb) = NEXT_INSN (insn);
	  if (NEXT_INSN (insn))
	    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	}
      if (LABEL_P (insn))
	return;
      insn = NEXT_INSN (insn);
    }
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block.  The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
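      /* EDGE_SUCC (src, 0) == e evaluates to 0 or 1, so the index below
	 selects the successor edge other than E.  */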
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  delete_insn_chain (kill_from, BB_END (src), false);
	  remove_barriers_from_footer (src);
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before the barrier so that the notes
		 which originally were or were created before the jump
		 table are inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
				        PREV_INSN (barrier), src);

	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this only when
   it originally jumped to OLD_LABEL.  Return true if this worked or if
   the original target wasn't OLD_LABEL; return false if redirection
   doesn't work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We could play games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
126610d565efSmrg       if (computed_jump_p (insn)
126710d565efSmrg 	  /* A return instruction can't be redirected.  */
126810d565efSmrg 	  || returnjump_p (insn))
126910d565efSmrg 	return false;
127010d565efSmrg 
127110d565efSmrg       if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
127210d565efSmrg 	{
127310d565efSmrg 	  /* If the insn doesn't go where we think, we're confused.  */
127410d565efSmrg 	  gcc_assert (JUMP_LABEL (insn) == old_label);
127510d565efSmrg 
127610d565efSmrg 	  /* If the substitution doesn't succeed, die.  This can happen
127710d565efSmrg 	     if the back end emitted unrecognizable instructions or if
1278c7a68eb7Smrg 	     target is exit block on some arches.  Or for crossing
1279c7a68eb7Smrg 	     jumps.  */
128010d565efSmrg 	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
128110d565efSmrg 			      block_label (new_bb), 0))
128210d565efSmrg 	    {
1283c7a68eb7Smrg 	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
1284c7a68eb7Smrg 			  || CROSSING_JUMP_P (insn));
128510d565efSmrg 	      return false;
128610d565efSmrg 	    }
128710d565efSmrg 	}
128810d565efSmrg     }
128910d565efSmrg   return true;
129010d565efSmrg }
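
/* A note on the bookkeeping above (editorial sketch, not verbatim GCC
   documentation): whenever a LABEL_REF is retargeted, the use counts of
   both labels must be adjusted in tandem, e.g.

       XEXP (ref, 0) = new_label;
       --LABEL_NUSES (old_label);
       ++LABEL_NUSES (new_label);

   Skipping either adjustment can let a still-referenced label be
   deleted, or keep a dead label alive indefinitely.  */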
129110d565efSmrg 
129210d565efSmrg 
129310d565efSmrg /* Redirect the edge representing a branch of an (un)conditional jump or
129410d565efSmrg    tablejump; return NULL on failure.  */
129510d565efSmrg static edge
129610d565efSmrg redirect_branch_edge (edge e, basic_block target)
129710d565efSmrg {
129810d565efSmrg   rtx_insn *old_label = BB_HEAD (e->dest);
129910d565efSmrg   basic_block src = e->src;
130010d565efSmrg   rtx_insn *insn = BB_END (src);
130110d565efSmrg 
130210d565efSmrg   /* We can only redirect non-fallthru edges of a jump insn.  */
130310d565efSmrg   if (e->flags & EDGE_FALLTHRU)
130410d565efSmrg     return NULL;
130510d565efSmrg   else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
130610d565efSmrg     return NULL;
130710d565efSmrg 
130810d565efSmrg   if (!currently_expanding_to_rtl)
130910d565efSmrg     {
131010d565efSmrg       if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
131110d565efSmrg 	return NULL;
131210d565efSmrg     }
131310d565efSmrg   else
131410d565efSmrg     /* When expanding, this BB might actually contain multiple
131510d565efSmrg        jumps (i.e. not yet split by find_many_sub_basic_blocks).
131610d565efSmrg        Redirect all of those that match our label.  */
131710d565efSmrg     FOR_BB_INSNS (src, insn)
131810d565efSmrg       if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
131910d565efSmrg 					     old_label, target))
132010d565efSmrg 	return NULL;
132110d565efSmrg 
132210d565efSmrg   if (dump_file)
132310d565efSmrg     fprintf (dump_file, "Edge %i->%i redirected to %i\n",
132410d565efSmrg 	     e->src->index, e->dest->index, target->index);
132510d565efSmrg 
132610d565efSmrg   if (e->dest != target)
132710d565efSmrg     e = redirect_edge_succ_nodup (e, target);
132810d565efSmrg 
132910d565efSmrg   return e;
133010d565efSmrg }
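
/* Illustrative use (hypothetical caller; BRANCH_EDGE picks the
   non-fallthru successor of a conditional jump):

       edge taken = BRANCH_EDGE (bb);
       if (!redirect_branch_edge (taken, target))
	 ... fall back, e.g. to force_nonfallthru_and_redirect below ...

   The fallthru edge itself is never handled here; it is rejected at the
   top of the function.  */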
133110d565efSmrg 
133210d565efSmrg /* Called when edge E has been redirected to a new destination,
133310d565efSmrg    in order to update the region crossing flag on the edge and
133410d565efSmrg    jump.  */
133510d565efSmrg 
133610d565efSmrg static void
133710d565efSmrg fixup_partition_crossing (edge e)
133810d565efSmrg {
133910d565efSmrg   if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
134010d565efSmrg       || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
134110d565efSmrg     return;
134210d565efSmrg   /* If we redirected an existing edge, it may already be marked
134310d565efSmrg      crossing, even though the new src is missing a reg crossing note.
134410d565efSmrg      But make sure the reg crossing note doesn't already exist before
134510d565efSmrg      inserting.  */
134610d565efSmrg   if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
134710d565efSmrg     {
134810d565efSmrg       e->flags |= EDGE_CROSSING;
1349c7a68eb7Smrg       if (JUMP_P (BB_END (e->src)))
135010d565efSmrg 	CROSSING_JUMP_P (BB_END (e->src)) = 1;
135110d565efSmrg     }
135210d565efSmrg   else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
135310d565efSmrg     {
135410d565efSmrg       e->flags &= ~EDGE_CROSSING;
135510d565efSmrg       /* Remove the section crossing note from jump at end of
135610d565efSmrg          src if it exists, and if no other successors are
135710d565efSmrg          still crossing.  */
135810d565efSmrg       if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
135910d565efSmrg         {
136010d565efSmrg           bool has_crossing_succ = false;
136110d565efSmrg           edge e2;
136210d565efSmrg           edge_iterator ei;
136310d565efSmrg           FOR_EACH_EDGE (e2, ei, e->src->succs)
136410d565efSmrg             {
136510d565efSmrg               has_crossing_succ |= (e2->flags & EDGE_CROSSING);
136610d565efSmrg               if (has_crossing_succ)
136710d565efSmrg                 break;
136810d565efSmrg             }
136910d565efSmrg           if (!has_crossing_succ)
137010d565efSmrg 	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
137110d565efSmrg         }
137210d565efSmrg     }
137310d565efSmrg }
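
/* The invariant maintained above, in sketch form:

       (e->flags & EDGE_CROSSING) != 0
	 exactly when BB_PARTITION (e->src) != BB_PARTITION (e->dest)

   and CROSSING_JUMP_P on the jump ending e->src is set iff at least one
   of its successor edges is still crossing.  */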
137410d565efSmrg 
137510d565efSmrg /* Called when block BB has been reassigned to the cold partition,
137610d565efSmrg    because it is now dominated by another cold block,
137710d565efSmrg    to ensure that the region crossing attributes are updated.  */
137810d565efSmrg 
137910d565efSmrg static void
138010d565efSmrg fixup_new_cold_bb (basic_block bb)
138110d565efSmrg {
138210d565efSmrg   edge e;
138310d565efSmrg   edge_iterator ei;
138410d565efSmrg 
138510d565efSmrg   /* This is called when a hot bb is found to now be dominated
138610d565efSmrg      by a cold bb and therefore needs to become cold. Therefore,
138710d565efSmrg      its preds will no longer be region crossing. Any non-dominating
138810d565efSmrg      preds that were previously hot would also have become cold
138910d565efSmrg      in the caller for the same region. Any preds that were previously
139010d565efSmrg      region-crossing will be adjusted in fixup_partition_crossing.  */
139110d565efSmrg   FOR_EACH_EDGE (e, ei, bb->preds)
139210d565efSmrg     {
139310d565efSmrg       fixup_partition_crossing (e);
139410d565efSmrg     }
139510d565efSmrg 
139610d565efSmrg   /* Possibly need to make bb's successor edges region crossing,
139710d565efSmrg      or remove stale region crossing flags.  */
139810d565efSmrg   FOR_EACH_EDGE (e, ei, bb->succs)
139910d565efSmrg     {
140010d565efSmrg       /* We can't have fall-through edges across partition boundaries.
140110d565efSmrg          Note that force_nonfallthru will do any necessary partition
140210d565efSmrg          boundary fixup by calling fixup_partition_crossing itself.  */
140310d565efSmrg       if ((e->flags & EDGE_FALLTHRU)
140410d565efSmrg           && BB_PARTITION (bb) != BB_PARTITION (e->dest)
140510d565efSmrg 	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
140610d565efSmrg         force_nonfallthru (e);
140710d565efSmrg       else
140810d565efSmrg         fixup_partition_crossing (e);
140910d565efSmrg     }
141010d565efSmrg }
141110d565efSmrg 
141210d565efSmrg /* Attempt to change code to redirect edge E to TARGET.  Don't do that at
141310d565efSmrg    the expense of adding new instructions or reordering basic blocks.
141410d565efSmrg 
141510d565efSmrg    The function can also be called with the edge destination already equal
141610d565efSmrg    to TARGET.  It should then try the simplifications and do nothing if
141710d565efSmrg    none is possible.
141810d565efSmrg 
141910d565efSmrg    Return the edge representing the branch if the transformation succeeded,
142010d565efSmrg    NULL on failure.  We still return NULL in case E already pointed to
142110d565efSmrg    TARGET and we didn't manage to simplify the instruction stream.  */
142210d565efSmrg 
142310d565efSmrg static edge
142410d565efSmrg rtl_redirect_edge_and_branch (edge e, basic_block target)
142510d565efSmrg {
142610d565efSmrg   edge ret;
142710d565efSmrg   basic_block src = e->src;
142810d565efSmrg   basic_block dest = e->dest;
142910d565efSmrg 
143010d565efSmrg   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
143110d565efSmrg     return NULL;
143210d565efSmrg 
143310d565efSmrg   if (dest == target)
143410d565efSmrg     return e;
143510d565efSmrg 
143610d565efSmrg   if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
143710d565efSmrg     {
143810d565efSmrg       df_set_bb_dirty (src);
143910d565efSmrg       fixup_partition_crossing (ret);
144010d565efSmrg       return ret;
144110d565efSmrg     }
144210d565efSmrg 
144310d565efSmrg   ret = redirect_branch_edge (e, target);
144410d565efSmrg   if (!ret)
144510d565efSmrg     return NULL;
144610d565efSmrg 
144710d565efSmrg   df_set_bb_dirty (src);
144810d565efSmrg   fixup_partition_crossing (ret);
144910d565efSmrg   return ret;
145010d565efSmrg }
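
/* Typical use from a pass (hypothetical sketch): try the cheap
   redirection first, and only force it when that fails:

       if (!redirect_edge_and_branch (e, target))
	 redirect_edge_and_branch_force (e, target);

   Both generic entry points dispatch to the RTL implementations in this
   file through the cfg hooks.  */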
145110d565efSmrg 
145210d565efSmrg /* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */
145310d565efSmrg 
145410d565efSmrg void
145510d565efSmrg emit_barrier_after_bb (basic_block bb)
145610d565efSmrg {
145710d565efSmrg   rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
145810d565efSmrg   gcc_assert (current_ir_type () == IR_RTL_CFGRTL
145910d565efSmrg               || current_ir_type () == IR_RTL_CFGLAYOUT);
146010d565efSmrg   if (current_ir_type () == IR_RTL_CFGLAYOUT)
146110d565efSmrg     {
146210d565efSmrg       rtx_insn *insn = unlink_insn_chain (barrier, barrier);
146310d565efSmrg 
146410d565efSmrg       if (BB_FOOTER (bb))
146510d565efSmrg 	{
146610d565efSmrg           rtx_insn *footer_tail = BB_FOOTER (bb);
146710d565efSmrg 
146810d565efSmrg           while (NEXT_INSN (footer_tail))
146910d565efSmrg             footer_tail = NEXT_INSN (footer_tail);
147010d565efSmrg           if (!BARRIER_P (footer_tail))
147110d565efSmrg             {
147210d565efSmrg               SET_NEXT_INSN (footer_tail) = insn;
147310d565efSmrg               SET_PREV_INSN (insn) = footer_tail;
147410d565efSmrg             }
147510d565efSmrg 	}
147610d565efSmrg       else
147710d565efSmrg         BB_FOOTER (bb) = insn;
147810d565efSmrg     }
147910d565efSmrg }
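
/* Usage sketch (illustrative; mirrors force_nonfallthru_and_redirect
   below): after emitting an unconditional jump at the end of BB, keep
   the insn stream well formed in either IR mode:

       emit_jump_insn_after (targetm.gen_jump (label), BB_END (bb));
       JUMP_LABEL (BB_END (bb)) = label;
       LABEL_NUSES (label)++;
       emit_barrier_after_bb (bb);  */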
148010d565efSmrg 
148110d565efSmrg /* Like force_nonfallthru below, but additionally performs redirection.
148210d565efSmrg    Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
148310d565efSmrg    when redirecting to the EXIT_BLOCK, it is either ret_rtx or
148410d565efSmrg    simple_return_rtx, indicating which kind of returnjump to create.
148510d565efSmrg    It should be NULL otherwise.  */
148610d565efSmrg 
148710d565efSmrg basic_block
148810d565efSmrg force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
148910d565efSmrg {
149010d565efSmrg   basic_block jump_block, new_bb = NULL, src = e->src;
149110d565efSmrg   rtx note;
149210d565efSmrg   edge new_edge;
149310d565efSmrg   int abnormal_edge_flags = 0;
149410d565efSmrg   bool asm_goto_edge = false;
149510d565efSmrg   int loc;
149610d565efSmrg 
149710d565efSmrg   /* In case the last instruction is a conditional jump to the next
149810d565efSmrg      instruction, first redirect the jump itself and then continue
149910d565efSmrg      by creating a basic block afterwards to redirect the fallthru edge.  */
150010d565efSmrg   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
150110d565efSmrg       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
150210d565efSmrg       && any_condjump_p (BB_END (e->src))
150310d565efSmrg       && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
150410d565efSmrg     {
150510d565efSmrg       rtx note;
150610d565efSmrg       edge b = unchecked_make_edge (e->src, target, 0);
150710d565efSmrg       bool redirected;
150810d565efSmrg 
150910d565efSmrg       redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
151010d565efSmrg 				  block_label (target), 0);
151110d565efSmrg       gcc_assert (redirected);
151210d565efSmrg 
151310d565efSmrg       note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
151410d565efSmrg       if (note)
151510d565efSmrg 	{
151610d565efSmrg 	  int prob = XINT (note, 0);
151710d565efSmrg 
1518c7a68eb7Smrg 	  b->probability = profile_probability::from_reg_br_prob_note (prob);
151910d565efSmrg 	  e->probability -= e->probability;
152010d565efSmrg 	}
152110d565efSmrg     }
152210d565efSmrg 
152310d565efSmrg   if (e->flags & EDGE_ABNORMAL)
152410d565efSmrg     {
152510d565efSmrg       /* Irritating special case - fallthru edge to the same block as the
152610d565efSmrg 	 abnormal edge.
152710d565efSmrg 	 We can't redirect the abnormal edge, but we can still split the
152810d565efSmrg 	 fallthru one and create a separate abnormal edge to the original
152910d565efSmrg 	 destination.  This allows bb-reorder to make such edges non-fallthru.  */
153010d565efSmrg       gcc_assert (e->dest == target);
153110d565efSmrg       abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
153210d565efSmrg       e->flags &= EDGE_FALLTHRU;
153310d565efSmrg     }
153410d565efSmrg   else
153510d565efSmrg     {
153610d565efSmrg       gcc_assert (e->flags & EDGE_FALLTHRU);
153710d565efSmrg       if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
153810d565efSmrg 	{
153910d565efSmrg 	  /* We can't redirect the entry block.  Create an empty block
154010d565efSmrg 	     at the start of the function which we use to add the new
154110d565efSmrg 	     jump.  */
154210d565efSmrg 	  edge tmp;
154310d565efSmrg 	  edge_iterator ei;
154410d565efSmrg 	  bool found = false;
154510d565efSmrg 
154610d565efSmrg 	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
154710d565efSmrg 					       ENTRY_BLOCK_PTR_FOR_FN (cfun));
1548c7a68eb7Smrg 	  bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
154910d565efSmrg 
155010d565efSmrg 	  /* Make sure new block ends up in correct hot/cold section.  */
155110d565efSmrg 	  BB_COPY_PARTITION (bb, e->dest);
155210d565efSmrg 
155310d565efSmrg 	  /* Change the existing edge's source to be the new block, and add
155410d565efSmrg 	     a new edge from the entry block to the new block.  */
155510d565efSmrg 	  e->src = bb;
155610d565efSmrg 	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
155710d565efSmrg 	       (tmp = ei_safe_edge (ei)); )
155810d565efSmrg 	    {
155910d565efSmrg 	      if (tmp == e)
156010d565efSmrg 		{
156110d565efSmrg 		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
156210d565efSmrg 		  found = true;
156310d565efSmrg 		  break;
156410d565efSmrg 		}
156510d565efSmrg 	      else
156610d565efSmrg 		ei_next (&ei);
156710d565efSmrg 	    }
156810d565efSmrg 
156910d565efSmrg 	  gcc_assert (found);
157010d565efSmrg 
157110d565efSmrg 	  vec_safe_push (bb->succs, e);
157210d565efSmrg 	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
157310d565efSmrg 				 EDGE_FALLTHRU);
157410d565efSmrg 	}
157510d565efSmrg     }
157610d565efSmrg 
157710d565efSmrg   /* If e->src ends with an asm goto, see if any of the ASM_OPERANDS_LABELs
157810d565efSmrg      don't point to the target or fallthru label.  */
157910d565efSmrg   if (JUMP_P (BB_END (e->src))
158010d565efSmrg       && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
158110d565efSmrg       && (e->flags & EDGE_FALLTHRU)
158210d565efSmrg       && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
158310d565efSmrg     {
158410d565efSmrg       int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
158510d565efSmrg       bool adjust_jump_target = false;
158610d565efSmrg 
158710d565efSmrg       for (i = 0; i < n; ++i)
158810d565efSmrg 	{
158910d565efSmrg 	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
159010d565efSmrg 	    {
159110d565efSmrg 	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
159210d565efSmrg 	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
159310d565efSmrg 	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
159410d565efSmrg 	      adjust_jump_target = true;
159510d565efSmrg 	    }
159610d565efSmrg 	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
159710d565efSmrg 	    asm_goto_edge = true;
159810d565efSmrg 	}
159910d565efSmrg       if (adjust_jump_target)
160010d565efSmrg 	{
160110d565efSmrg 	  rtx_insn *insn = BB_END (e->src);
160210d565efSmrg 	  rtx note;
160310d565efSmrg 	  rtx_insn *old_label = BB_HEAD (e->dest);
160410d565efSmrg 	  rtx_insn *new_label = BB_HEAD (target);
160510d565efSmrg 
160610d565efSmrg 	  if (JUMP_LABEL (insn) == old_label)
160710d565efSmrg 	    {
160810d565efSmrg 	      JUMP_LABEL (insn) = new_label;
160910d565efSmrg 	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
161010d565efSmrg 	      if (note)
161110d565efSmrg 		remove_note (insn, note);
161210d565efSmrg 	    }
161310d565efSmrg 	  else
161410d565efSmrg 	    {
161510d565efSmrg 	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
161610d565efSmrg 	      if (note)
161710d565efSmrg 		remove_note (insn, note);
161810d565efSmrg 	      if (JUMP_LABEL (insn) != new_label
161910d565efSmrg 		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
162010d565efSmrg 		add_reg_note (insn, REG_LABEL_TARGET, new_label);
162110d565efSmrg 	    }
162210d565efSmrg 	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
162310d565efSmrg 		 != NULL_RTX)
162410d565efSmrg 	    XEXP (note, 0) = new_label;
162510d565efSmrg 	}
162610d565efSmrg     }
162710d565efSmrg 
162810d565efSmrg   if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
162910d565efSmrg     {
163010d565efSmrg       rtx_insn *new_head;
1631c7a68eb7Smrg       profile_count count = e->count ();
1632c7a68eb7Smrg       profile_probability probability = e->probability;
163310d565efSmrg       /* Create the new structures.  */
163410d565efSmrg 
163510d565efSmrg       /* If the old block ended with a tablejump, skip its table
163610d565efSmrg 	 by searching forward from there.  Otherwise start searching
163710d565efSmrg 	 forward from the last instruction of the old block.  */
163810d565efSmrg       rtx_jump_table_data *table;
163910d565efSmrg       if (tablejump_p (BB_END (e->src), NULL, &table))
164010d565efSmrg 	new_head = table;
164110d565efSmrg       else
164210d565efSmrg 	new_head = BB_END (e->src);
164310d565efSmrg       new_head = NEXT_INSN (new_head);
164410d565efSmrg 
164510d565efSmrg       jump_block = create_basic_block (new_head, NULL, e->src);
164610d565efSmrg       jump_block->count = count;
164710d565efSmrg 
164810d565efSmrg       /* Make sure new block ends up in correct hot/cold section.  */
164910d565efSmrg 
165010d565efSmrg       BB_COPY_PARTITION (jump_block, e->src);
165110d565efSmrg 
165210d565efSmrg       /* Wire edge in.  */
165310d565efSmrg       new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
165410d565efSmrg       new_edge->probability = probability;
165510d565efSmrg 
165610d565efSmrg       /* Redirect old edge.  */
165710d565efSmrg       redirect_edge_pred (e, jump_block);
1658c7a68eb7Smrg       e->probability = profile_probability::always ();
165910d565efSmrg 
166010d565efSmrg       /* If e->src was previously region crossing, it no longer is
166110d565efSmrg          and the reg crossing note should be removed.  */
166210d565efSmrg       fixup_partition_crossing (new_edge);
166310d565efSmrg 
166410d565efSmrg       /* If the asm goto has any label refs to target's label,
166510d565efSmrg 	 also add an edge from the asm goto bb to target.  */
166610d565efSmrg       if (asm_goto_edge)
166710d565efSmrg 	{
1668c7a68eb7Smrg 	  new_edge->probability = new_edge->probability.apply_scale (1, 2);
1669c7a68eb7Smrg 	  jump_block->count = jump_block->count.apply_scale (1, 2);
1670c7a68eb7Smrg 	  edge new_edge2 = make_edge (new_edge->src, target,
167110d565efSmrg 				      e->flags & ~EDGE_FALLTHRU);
1672c7a68eb7Smrg 	  new_edge2->probability = probability - new_edge->probability;
167310d565efSmrg 	}
167410d565efSmrg 
167510d565efSmrg       new_bb = jump_block;
167610d565efSmrg     }
167710d565efSmrg   else
167810d565efSmrg     jump_block = e->src;
167910d565efSmrg 
168010d565efSmrg   loc = e->goto_locus;
168110d565efSmrg   e->flags &= ~EDGE_FALLTHRU;
168210d565efSmrg   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
168310d565efSmrg     {
168410d565efSmrg       if (jump_label == ret_rtx)
168510d565efSmrg 	emit_jump_insn_after_setloc (targetm.gen_return (),
168610d565efSmrg 				     BB_END (jump_block), loc);
168710d565efSmrg       else
168810d565efSmrg 	{
168910d565efSmrg 	  gcc_assert (jump_label == simple_return_rtx);
169010d565efSmrg 	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
169110d565efSmrg 				       BB_END (jump_block), loc);
169210d565efSmrg 	}
169310d565efSmrg       set_return_jump_label (BB_END (jump_block));
169410d565efSmrg     }
169510d565efSmrg   else
169610d565efSmrg     {
169710d565efSmrg       rtx_code_label *label = block_label (target);
169810d565efSmrg       emit_jump_insn_after_setloc (targetm.gen_jump (label),
169910d565efSmrg 				   BB_END (jump_block), loc);
170010d565efSmrg       JUMP_LABEL (BB_END (jump_block)) = label;
170110d565efSmrg       LABEL_NUSES (label)++;
170210d565efSmrg     }
170310d565efSmrg 
170410d565efSmrg   /* We might be in cfg layout mode, and if so, the following routine will
170510d565efSmrg      insert the barrier correctly.  */
170610d565efSmrg   emit_barrier_after_bb (jump_block);
170710d565efSmrg   redirect_edge_succ_nodup (e, target);
170810d565efSmrg 
170910d565efSmrg   if (abnormal_edge_flags)
171010d565efSmrg     make_edge (src, target, abnormal_edge_flags);
171110d565efSmrg 
171210d565efSmrg   df_mark_solutions_dirty ();
171310d565efSmrg   fixup_partition_crossing (e);
171410d565efSmrg   return new_bb;
171510d565efSmrg }
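
/* Schematically, redirecting a fallthru edge A->B to TARGET produces
   (editorial sketch, not literal RTL):

       A:  ...                         A:  ...
	   (falls through)     ==>         (falls through)
       B:  ...                      J:     jump TARGET
					   barrier
				    B:  ...

   where the new jump_block J is created only when A has multiple
   successors, an abnormal edge, or an asm-goto complication; otherwise
   the jump is emitted at the end of A itself.  */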
171610d565efSmrg 
171710d565efSmrg /* Edge E is assumed to be a fallthru edge.  Emit the needed jump
171810d565efSmrg    instruction (and possibly create a new basic block) to make the edge
171910d565efSmrg    non-fallthru.  Return the newly created BB, or NULL if none.  */
172010d565efSmrg 
172110d565efSmrg static basic_block
172210d565efSmrg rtl_force_nonfallthru (edge e)
172310d565efSmrg {
172410d565efSmrg   return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
172510d565efSmrg }
172610d565efSmrg 
172710d565efSmrg /* Redirect edge E even at the expense of creating a new jump insn or
172810d565efSmrg    basic block.  Return the new basic block if one is created, NULL
172910d565efSmrg    otherwise.  The conversion must be possible.  */
173010d565efSmrg 
173110d565efSmrg static basic_block
173210d565efSmrg rtl_redirect_edge_and_branch_force (edge e, basic_block target)
173310d565efSmrg {
173410d565efSmrg   if (redirect_edge_and_branch (e, target)
173510d565efSmrg       || e->dest == target)
173610d565efSmrg     return NULL;
173710d565efSmrg 
173810d565efSmrg   /* In case the edge redirection failed, try to force it to be non-fallthru
173910d565efSmrg      and redirect newly created simplejump.  */
174010d565efSmrg   df_set_bb_dirty (e->src);
174110d565efSmrg   return force_nonfallthru_and_redirect (e, target, NULL_RTX);
174210d565efSmrg }
174310d565efSmrg 
174410d565efSmrg /* The given edge should potentially be a fallthru edge.  If that is in
174510d565efSmrg    fact true, delete the jump and barriers that are in the way.  */
174610d565efSmrg 
174710d565efSmrg static void
174810d565efSmrg rtl_tidy_fallthru_edge (edge e)
174910d565efSmrg {
175010d565efSmrg   rtx_insn *q;
175110d565efSmrg   basic_block b = e->src, c = b->next_bb;
175210d565efSmrg 
175310d565efSmrg   /* ??? In a late-running flow pass, other folks may have deleted basic
175410d565efSmrg      blocks by nopping out blocks, leaving multiple BARRIERs between here
175510d565efSmrg      and the target label. They ought to be chastised and fixed.
175610d565efSmrg 
175710d565efSmrg      We can also wind up with a sequence of undeletable labels between
175810d565efSmrg      one block and the next.
175910d565efSmrg 
176010d565efSmrg      So search through a sequence of barriers, labels, and notes for
176110d565efSmrg      the head of block C and assert that we really do fall through.  */
176210d565efSmrg 
176310d565efSmrg   for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1764c7a68eb7Smrg     if (NONDEBUG_INSN_P (q))
176510d565efSmrg       return;
176610d565efSmrg 
176710d565efSmrg   /* Remove what will soon cease being the jump insn from the source block.
176810d565efSmrg      If block B consisted only of this single jump, turn it into a deleted
176910d565efSmrg      note.  */
177010d565efSmrg   q = BB_END (b);
177110d565efSmrg   if (JUMP_P (q)
177210d565efSmrg       && onlyjump_p (q)
177310d565efSmrg       && (any_uncondjump_p (q)
177410d565efSmrg 	  || single_succ_p (b)))
177510d565efSmrg     {
177610d565efSmrg       rtx_insn *label;
177710d565efSmrg       rtx_jump_table_data *table;
177810d565efSmrg 
177910d565efSmrg       if (tablejump_p (q, &label, &table))
178010d565efSmrg 	{
178110d565efSmrg 	  /* The label is likely mentioned in some instruction before
178210d565efSmrg 	     the tablejump and might not be DCEd, so turn it into
178310d565efSmrg 	     a note instead and move it before the tablejump that is
178410d565efSmrg 	     going to be deleted.  */
178510d565efSmrg 	  const char *name = LABEL_NAME (label);
178610d565efSmrg 	  PUT_CODE (label, NOTE);
178710d565efSmrg 	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
178810d565efSmrg 	  NOTE_DELETED_LABEL_NAME (label) = name;
178910d565efSmrg 	  reorder_insns (label, label, PREV_INSN (q));
179010d565efSmrg 	  delete_insn (table);
179110d565efSmrg 	}
179210d565efSmrg 
179310d565efSmrg       /* If this was a conditional jump, we need to also delete
179410d565efSmrg 	 the insn that set cc0.  */
179510d565efSmrg       if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
179610d565efSmrg 	q = PREV_INSN (q);
179710d565efSmrg 
179810d565efSmrg       q = PREV_INSN (q);
179910d565efSmrg     }
180010d565efSmrg   /* Unconditional jumps with side-effects (i.e. which we can't just delete
180110d565efSmrg      together with the barrier) should never have a fallthru edge.  */
180210d565efSmrg   else if (JUMP_P (q) && any_uncondjump_p (q))
180310d565efSmrg     return;
180410d565efSmrg 
180510d565efSmrg   /* Selectively unlink the sequence.  */
180610d565efSmrg   if (q != PREV_INSN (BB_HEAD (c)))
180710d565efSmrg     delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
180810d565efSmrg 
180910d565efSmrg   e->flags |= EDGE_FALLTHRU;
181010d565efSmrg }
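
/* Sketch of the tidying above:

       B:  ...                      B:  ...
	   jump L          ==>          (falls through)
	   barrier
       C: L: ...                    C: L: ...

   The jump and barrier are removed and E is marked EDGE_FALLTHRU.  */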
181110d565efSmrg 
181210d565efSmrg /* Should move basic block BB after basic block AFTER.  Not implemented yet.  */
181310d565efSmrg 
181410d565efSmrg static bool
181510d565efSmrg rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
181610d565efSmrg 		      basic_block after ATTRIBUTE_UNUSED)
181710d565efSmrg {
181810d565efSmrg   return false;
181910d565efSmrg }
182010d565efSmrg 
182110d565efSmrg /* Locate the last bb in the same partition as START_BB.  */
182210d565efSmrg 
182310d565efSmrg static basic_block
182410d565efSmrg last_bb_in_partition (basic_block start_bb)
182510d565efSmrg {
182610d565efSmrg   basic_block bb;
182710d565efSmrg   FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
182810d565efSmrg     {
182910d565efSmrg       if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
183010d565efSmrg         return bb;
183110d565efSmrg     }
183210d565efSmrg   /* Return bb before the exit block.  */
183310d565efSmrg   return bb->prev_bb;
183410d565efSmrg }
183510d565efSmrg 
183610d565efSmrg /* Split a (typically critical) edge.  Return the new block.
183710d565efSmrg    The edge must not be abnormal.
183810d565efSmrg 
183910d565efSmrg    ??? The code generally expects to be called on critical edges.
184010d565efSmrg    The case of a block ending in an unconditional jump to a
184110d565efSmrg    block with multiple predecessors is not handled optimally.  */
184210d565efSmrg 
184310d565efSmrg static basic_block
184410d565efSmrg rtl_split_edge (edge edge_in)
184510d565efSmrg {
184610d565efSmrg   basic_block bb, new_bb;
184710d565efSmrg   rtx_insn *before;
184810d565efSmrg 
184910d565efSmrg   /* Abnormal edges cannot be split.  */
185010d565efSmrg   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
185110d565efSmrg 
185210d565efSmrg   /* We are going to place the new block in front of the edge destination.
185310d565efSmrg      Avoid the existence of fallthru predecessors.  */
185410d565efSmrg   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
185510d565efSmrg     {
185610d565efSmrg       edge e = find_fallthru_edge (edge_in->dest->preds);
185710d565efSmrg 
185810d565efSmrg       if (e)
185910d565efSmrg 	force_nonfallthru (e);
186010d565efSmrg     }
186110d565efSmrg 
186210d565efSmrg   /* Create the basic block note.  */
186310d565efSmrg   if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
186410d565efSmrg     before = BB_HEAD (edge_in->dest);
186510d565efSmrg   else
186610d565efSmrg     before = NULL;
186710d565efSmrg 
186810d565efSmrg   /* If this is a fall through edge to the exit block, the blocks might not
186910d565efSmrg      be adjacent, and the right place is after the source.  */
187010d565efSmrg   if ((edge_in->flags & EDGE_FALLTHRU)
187110d565efSmrg       && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
187210d565efSmrg     {
187310d565efSmrg       before = NEXT_INSN (BB_END (edge_in->src));
187410d565efSmrg       bb = create_basic_block (before, NULL, edge_in->src);
187510d565efSmrg       BB_COPY_PARTITION (bb, edge_in->src);
187610d565efSmrg     }
187710d565efSmrg   else
187810d565efSmrg     {
187910d565efSmrg       if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
188010d565efSmrg         {
188110d565efSmrg           bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
188210d565efSmrg           BB_COPY_PARTITION (bb, edge_in->dest);
188310d565efSmrg         }
188410d565efSmrg       else
188510d565efSmrg         {
188610d565efSmrg           basic_block after = edge_in->dest->prev_bb;
188710d565efSmrg           /* If this is post-bb reordering, and the edge crosses a partition
188810d565efSmrg              boundary, the new block needs to be inserted in the bb chain
188910d565efSmrg              at the end of the src partition (since we put the new bb into
189010d565efSmrg              that partition, see below). Otherwise we may end up creating
189110d565efSmrg              an extra partition crossing in the chain, which is illegal.
189210d565efSmrg              It can't go after the src, because src may have a fall-through
189310d565efSmrg              to a different block.  */
189410d565efSmrg           if (crtl->bb_reorder_complete
189510d565efSmrg               && (edge_in->flags & EDGE_CROSSING))
189610d565efSmrg             {
189710d565efSmrg               after = last_bb_in_partition (edge_in->src);
189810d565efSmrg               before = get_last_bb_insn (after);
189910d565efSmrg               /* The instruction following the last bb in the partition
190010d565efSmrg                  should be a barrier, since it cannot end in a fall-through.  */
190110d565efSmrg               gcc_checking_assert (BARRIER_P (before));
190210d565efSmrg               before = NEXT_INSN (before);
190310d565efSmrg             }
190410d565efSmrg           bb = create_basic_block (before, NULL, after);
190510d565efSmrg           /* Put the split bb into the src partition, to avoid creating
190610d565efSmrg              a situation where a cold bb dominates a hot bb, in the case
190710d565efSmrg              where src is cold and dest is hot. The src will dominate
190810d565efSmrg              the new bb (whereas it might not have dominated dest).  */
190910d565efSmrg           BB_COPY_PARTITION (bb, edge_in->src);
191010d565efSmrg         }
191110d565efSmrg     }
191210d565efSmrg 
191310d565efSmrg   make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
191410d565efSmrg 
191510d565efSmrg   /* Can't allow a region crossing edge to be fallthrough.  */
191610d565efSmrg   if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
191710d565efSmrg       && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
191810d565efSmrg     {
191910d565efSmrg       new_bb = force_nonfallthru (single_succ_edge (bb));
192010d565efSmrg       gcc_assert (!new_bb);
192110d565efSmrg     }
192210d565efSmrg 
192310d565efSmrg   /* For non-fallthru edges, we must adjust the predecessor's
192410d565efSmrg      jump instruction to target our new block.  */
192510d565efSmrg   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
192610d565efSmrg     {
192710d565efSmrg       edge redirected = redirect_edge_and_branch (edge_in, bb);
192810d565efSmrg       gcc_assert (redirected);
192910d565efSmrg     }
193010d565efSmrg   else
193110d565efSmrg     {
193210d565efSmrg       if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
193310d565efSmrg 	{
193410d565efSmrg 	  /* For an asm goto, even splitting of the fallthru edge might
193510d565efSmrg 	     need insn patching, as other labels might point to the
193610d565efSmrg 	     old label.  */
193710d565efSmrg 	  rtx_insn *last = BB_END (edge_in->src);
193810d565efSmrg 	  if (last
193910d565efSmrg 	      && JUMP_P (last)
194010d565efSmrg 	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
194110d565efSmrg 	      && (extract_asm_operands (PATTERN (last))
194210d565efSmrg 		  || JUMP_LABEL (last) == before)
194310d565efSmrg 	      && patch_jump_insn (last, before, bb))
194410d565efSmrg 	    df_set_bb_dirty (edge_in->src);
194510d565efSmrg 	}
194610d565efSmrg       redirect_edge_succ (edge_in, bb);
194710d565efSmrg     }
194810d565efSmrg 
194910d565efSmrg   return bb;
195010d565efSmrg }
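
/* Hypothetical use: a pass that must place code on a critical edge can
   split it explicitly,

       basic_block bb = split_edge (e);

   though for inserting instructions the queueing API below
   (insert_insn_on_edge + commit_edge_insertions) is usually preferable,
   since it splits edges only when actually required.  */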
195110d565efSmrg 
195210d565efSmrg /* Queue instructions for insertion on an edge between two basic blocks.
195310d565efSmrg    The new instructions and basic blocks (if any) will not appear in the
195410d565efSmrg    CFG until commit_edge_insertions is called.  */
195510d565efSmrg 
195610d565efSmrg void
195710d565efSmrg insert_insn_on_edge (rtx pattern, edge e)
195810d565efSmrg {
195910d565efSmrg   /* We cannot insert instructions on an abnormal critical edge.
196010d565efSmrg      It will be easier to find the culprit if we die now.  */
196110d565efSmrg   gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
196210d565efSmrg 
196310d565efSmrg   if (e->insns.r == NULL_RTX)
196410d565efSmrg     start_sequence ();
196510d565efSmrg   else
196610d565efSmrg     push_to_sequence (e->insns.r);
196710d565efSmrg 
196810d565efSmrg   emit_insn (pattern);
196910d565efSmrg 
197010d565efSmrg   e->insns.r = get_insns ();
197110d565efSmrg   end_sequence ();
197210d565efSmrg }
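
/* Example of the queue-and-commit protocol (illustrative sketch;
   need_copy_p, dest_reg and src_reg are hypothetical):

       edge e;
       edge_iterator ei;
       FOR_EACH_EDGE (e, ei, bb->succs)
	 if (need_copy_p (e))
	   insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e);
       commit_edge_insertions ();

   Nothing appears in the insn stream until commit_edge_insertions is
   called, which splits edges as needed to find an insertion point.  */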
197310d565efSmrg 
197410d565efSmrg /* Update the CFG for the instructions queued on edge E.  */
197510d565efSmrg 
197610d565efSmrg void
197710d565efSmrg commit_one_edge_insertion (edge e)
197810d565efSmrg {
197910d565efSmrg   rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
198010d565efSmrg   basic_block bb;
198110d565efSmrg 
198210d565efSmrg   /* Pull the insns off the edge now since the edge might go away.  */
198310d565efSmrg   insns = e->insns.r;
198410d565efSmrg   e->insns.r = NULL;
198510d565efSmrg 
198610d565efSmrg   /* Figure out where to put these insns.  If the destination has
198710d565efSmrg      one predecessor, insert there.  Except for the exit block.  */
198810d565efSmrg   if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
198910d565efSmrg     {
199010d565efSmrg       bb = e->dest;
199110d565efSmrg 
199210d565efSmrg       /* Get the location correct wrt a code label, and "nice" wrt
199310d565efSmrg 	 a basic block note, and before everything else.  */
199410d565efSmrg       tmp = BB_HEAD (bb);
199510d565efSmrg       if (LABEL_P (tmp))
199610d565efSmrg 	tmp = NEXT_INSN (tmp);
199710d565efSmrg       if (NOTE_INSN_BASIC_BLOCK_P (tmp))
199810d565efSmrg 	tmp = NEXT_INSN (tmp);
199910d565efSmrg       if (tmp == BB_HEAD (bb))
200010d565efSmrg 	before = tmp;
200110d565efSmrg       else if (tmp)
200210d565efSmrg 	after = PREV_INSN (tmp);
200310d565efSmrg       else
200410d565efSmrg 	after = get_last_insn ();
200510d565efSmrg     }
200610d565efSmrg 
200710d565efSmrg   /* If the source has one successor and the edge is not abnormal,
200810d565efSmrg      insert there.  Except for the entry block.
200910d565efSmrg      Don't do this if the predecessor ends in a jump other than an
201010d565efSmrg      unconditional simple jump.  E.g. for an asm goto that points all
201110d565efSmrg      its labels at the fallthru basic block, we can't insert instructions
201210d565efSmrg      before the asm goto, as the asm goto can have various side effects,
201310d565efSmrg      and can't emit instructions after the asm goto, as it must end
201410d565efSmrg      the basic block.  */
201510d565efSmrg   else if ((e->flags & EDGE_ABNORMAL) == 0
201610d565efSmrg 	   && single_succ_p (e->src)
201710d565efSmrg 	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
201810d565efSmrg 	   && (!JUMP_P (BB_END (e->src))
201910d565efSmrg 	       || simplejump_p (BB_END (e->src))))
202010d565efSmrg     {
202110d565efSmrg       bb = e->src;
202210d565efSmrg 
202310d565efSmrg       /* It is possible to have a non-simple jump here.  Consider a target
202410d565efSmrg 	 where some forms of unconditional jumps clobber a register.  This
202510d565efSmrg 	 happens on the fr30 for example.
202610d565efSmrg 
202710d565efSmrg 	 We know this block has a single successor, so we can just emit
202810d565efSmrg 	 the queued insns before the jump.  */
202910d565efSmrg       if (JUMP_P (BB_END (bb)))
203010d565efSmrg 	before = BB_END (bb);
203110d565efSmrg       else
203210d565efSmrg 	{
203310d565efSmrg 	  /* We'd better be fallthru, or we've lost track of what's what.  */
203410d565efSmrg 	  gcc_assert (e->flags & EDGE_FALLTHRU);
203510d565efSmrg 
203610d565efSmrg 	  after = BB_END (bb);
203710d565efSmrg 	}
203810d565efSmrg     }
203910d565efSmrg 
204010d565efSmrg   /* Otherwise we must split the edge.  */
204110d565efSmrg   else
204210d565efSmrg     {
204310d565efSmrg       bb = split_edge (e);
204410d565efSmrg 
204510d565efSmrg       /* If E crossed a partition boundary, we needed to make bb end in
204610d565efSmrg          a region-crossing jump, even though it was originally fallthru.  */
204710d565efSmrg       if (JUMP_P (BB_END (bb)))
204810d565efSmrg 	before = BB_END (bb);
204910d565efSmrg       else
205010d565efSmrg         after = BB_END (bb);
205110d565efSmrg     }
205210d565efSmrg 
205310d565efSmrg   /* Now that we've found the spot, do the insertion.  */
205410d565efSmrg   if (before)
205510d565efSmrg     {
205610d565efSmrg       emit_insn_before_noloc (insns, before, bb);
205710d565efSmrg       last = prev_nonnote_insn (before);
205810d565efSmrg     }
205910d565efSmrg   else
206010d565efSmrg     last = emit_insn_after_noloc (insns, after, bb);
206110d565efSmrg 
206210d565efSmrg   if (returnjump_p (last))
206310d565efSmrg     {
206410d565efSmrg       /* ??? Remove all outgoing edges from BB and add one for EXIT.
206510d565efSmrg 	 This is not currently a problem because this only happens
206610d565efSmrg 	 for the (single) epilogue, which already has a fallthru edge
206710d565efSmrg 	 to EXIT.  */
206810d565efSmrg 
206910d565efSmrg       e = single_succ_edge (bb);
207010d565efSmrg       gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
207110d565efSmrg 		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
207210d565efSmrg 
207310d565efSmrg       e->flags &= ~EDGE_FALLTHRU;
207410d565efSmrg       emit_barrier_after (last);
207510d565efSmrg 
207610d565efSmrg       if (before)
207710d565efSmrg 	delete_insn (before);
207810d565efSmrg     }
207910d565efSmrg   else
208010d565efSmrg     gcc_assert (!JUMP_P (last));
208110d565efSmrg }
208210d565efSmrg 
208310d565efSmrg /* Update the CFG for all queued instructions.  */
208410d565efSmrg 
208510d565efSmrg void
208610d565efSmrg commit_edge_insertions (void)
208710d565efSmrg {
208810d565efSmrg   basic_block bb;
208910d565efSmrg 
209010d565efSmrg   /* Optimization passes that invoke this routine can cause hot blocks
209110d565efSmrg      previously reached by both hot and cold blocks to become dominated only
209210d565efSmrg      by cold blocks. This will cause the verification below to fail,
209310d565efSmrg      and lead to now-cold code in the hot section.  In some cases this
209410d565efSmrg      may only be visible after newly unreachable blocks are deleted,
209510d565efSmrg      which will be done by fixup_partitions.  */
209610d565efSmrg   fixup_partitions ();
209710d565efSmrg 
209810d565efSmrg   checking_verify_flow_info ();
209910d565efSmrg 
210010d565efSmrg   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
210110d565efSmrg 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
210210d565efSmrg     {
210310d565efSmrg       edge e;
210410d565efSmrg       edge_iterator ei;
210510d565efSmrg 
210610d565efSmrg       FOR_EACH_EDGE (e, ei, bb->succs)
210710d565efSmrg 	if (e->insns.r)
210810d565efSmrg 	  commit_one_edge_insertion (e);
210910d565efSmrg     }
211010d565efSmrg }
211110d565efSmrg 
211210d565efSmrg 
211310d565efSmrg /* Print out RTL-specific basic block information (live information
211410d565efSmrg    at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
211510d565efSmrg    documented in dumpfile.h.  */
211610d565efSmrg 
211710d565efSmrg static void
2118c7a68eb7Smrg rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
211910d565efSmrg {
212010d565efSmrg   char *s_indent;
212110d565efSmrg 
212210d565efSmrg   s_indent = (char *) alloca ((size_t) indent + 1);
212310d565efSmrg   memset (s_indent, ' ', (size_t) indent);
212410d565efSmrg   s_indent[indent] = '\0';
212510d565efSmrg 
212610d565efSmrg   if (df && (flags & TDF_DETAILS))
212710d565efSmrg     {
212810d565efSmrg       df_dump_top (bb, outf);
212910d565efSmrg       putc ('\n', outf);
213010d565efSmrg     }
213110d565efSmrg 
213210d565efSmrg   if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2133c7a68eb7Smrg     {
2134c7a68eb7Smrg       rtx_insn *last = BB_END (bb);
2135c7a68eb7Smrg       if (last)
2136c7a68eb7Smrg 	last = NEXT_INSN (last);
2137c7a68eb7Smrg       for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
213810d565efSmrg 	{
213910d565efSmrg 	  if (flags & TDF_DETAILS)
214010d565efSmrg 	    df_dump_insn_top (insn, outf);
214110d565efSmrg 	  if (! (flags & TDF_SLIM))
214210d565efSmrg 	    print_rtl_single (outf, insn);
214310d565efSmrg 	  else
214410d565efSmrg 	    dump_insn_slim (outf, insn);
214510d565efSmrg 	  if (flags & TDF_DETAILS)
214610d565efSmrg 	    df_dump_insn_bottom (insn, outf);
214710d565efSmrg 	}
2148c7a68eb7Smrg     }
214910d565efSmrg 
215010d565efSmrg   if (df && (flags & TDF_DETAILS))
215110d565efSmrg     {
215210d565efSmrg       df_dump_bottom (bb, outf);
215310d565efSmrg       putc ('\n', outf);
215410d565efSmrg     }
215510d565efSmrg 
215610d565efSmrg }
215710d565efSmrg 
215810d565efSmrg /* Like dump_function_to_file, but for RTL.  Print out dataflow information
215910d565efSmrg    for the start of each basic block.  FLAGS are the TDF_* masks documented
216010d565efSmrg    in dumpfile.h.  */
216110d565efSmrg 
216210d565efSmrg void
2163c7a68eb7Smrg print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
216410d565efSmrg {
216510d565efSmrg   const rtx_insn *tmp_rtx;
216610d565efSmrg   if (rtx_first == 0)
216710d565efSmrg     fprintf (outf, "(nil)\n");
216810d565efSmrg   else
216910d565efSmrg     {
217010d565efSmrg       enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
217110d565efSmrg       int max_uid = get_max_uid ();
217210d565efSmrg       basic_block *start = XCNEWVEC (basic_block, max_uid);
217310d565efSmrg       basic_block *end = XCNEWVEC (basic_block, max_uid);
217410d565efSmrg       enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
217510d565efSmrg       basic_block bb;
217610d565efSmrg 
217710d565efSmrg       /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
217810d565efSmrg 	 insns, but the CFG is not maintained so the basic block info
217910d565efSmrg 	 is not reliable.  Therefore it's omitted from the dumps.  */
218010d565efSmrg       if (! (cfun->curr_properties & PROP_cfg))
218110d565efSmrg         flags &= ~TDF_BLOCKS;
218210d565efSmrg 
218310d565efSmrg       if (df)
218410d565efSmrg 	df_dump_start (outf);
218510d565efSmrg 
218610d565efSmrg       if (flags & TDF_BLOCKS)
218710d565efSmrg 	{
218810d565efSmrg 	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
218910d565efSmrg 	    {
219010d565efSmrg 	      rtx_insn *x;
219110d565efSmrg 
219210d565efSmrg 	      start[INSN_UID (BB_HEAD (bb))] = bb;
219310d565efSmrg 	      end[INSN_UID (BB_END (bb))] = bb;
219410d565efSmrg 	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
219510d565efSmrg 		{
219610d565efSmrg 		  enum bb_state state = IN_MULTIPLE_BB;
219710d565efSmrg 
219810d565efSmrg 		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
219910d565efSmrg 		    state = IN_ONE_BB;
220010d565efSmrg 		  in_bb_p[INSN_UID (x)] = state;
220110d565efSmrg 
220210d565efSmrg 		  if (x == BB_END (bb))
220310d565efSmrg 		    break;
220410d565efSmrg 		}
220510d565efSmrg 	    }
220610d565efSmrg 	}
220710d565efSmrg 
2208c7a68eb7Smrg       for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
220910d565efSmrg 	{
221010d565efSmrg 	  if (flags & TDF_BLOCKS)
221110d565efSmrg 	    {
221210d565efSmrg 	      bb = start[INSN_UID (tmp_rtx)];
221310d565efSmrg 	      if (bb != NULL)
221410d565efSmrg 		{
2215c7a68eb7Smrg 		  dump_bb_info (outf, bb, 0, dump_flags, true, false);
221610d565efSmrg 		  if (df && (flags & TDF_DETAILS))
221710d565efSmrg 		    df_dump_top (bb, outf);
221810d565efSmrg 		}
221910d565efSmrg 
222010d565efSmrg 	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
222110d565efSmrg 		  && !NOTE_P (tmp_rtx)
222210d565efSmrg 		  && !BARRIER_P (tmp_rtx))
222310d565efSmrg 		fprintf (outf, ";; Insn is not within a basic block\n");
222410d565efSmrg 	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
222510d565efSmrg 		fprintf (outf, ";; Insn is in multiple basic blocks\n");
222610d565efSmrg 	    }
222710d565efSmrg 
222810d565efSmrg 	  if (flags & TDF_DETAILS)
222910d565efSmrg 	    df_dump_insn_top (tmp_rtx, outf);
223010d565efSmrg 	  if (! (flags & TDF_SLIM))
223110d565efSmrg 	    print_rtl_single (outf, tmp_rtx);
223210d565efSmrg 	  else
223310d565efSmrg 	    dump_insn_slim (outf, tmp_rtx);
223410d565efSmrg 	  if (flags & TDF_DETAILS)
223510d565efSmrg 	    df_dump_insn_bottom (tmp_rtx, outf);
223610d565efSmrg 
223710d565efSmrg 	  if (flags & TDF_BLOCKS)
223810d565efSmrg 	    {
223910d565efSmrg 	      bb = end[INSN_UID (tmp_rtx)];
224010d565efSmrg 	      if (bb != NULL)
224110d565efSmrg 		{
2242c7a68eb7Smrg 		  dump_bb_info (outf, bb, 0, dump_flags, false, true);
224310d565efSmrg 		  if (df && (flags & TDF_DETAILS))
224410d565efSmrg 		    df_dump_bottom (bb, outf);
224510d565efSmrg 		  putc ('\n', outf);
224610d565efSmrg 		}
224710d565efSmrg 	    }
224810d565efSmrg 	}
224910d565efSmrg 
225010d565efSmrg       free (start);
225110d565efSmrg       free (end);
225210d565efSmrg       free (in_bb_p);
225310d565efSmrg     }
225410d565efSmrg }
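
/* Handy from a debugger or ad-hoc instrumentation, e.g.

       print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);

   which is exactly what rtl_verify_edges below does when it detects a
   stale region-crossing jump.  */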
225510d565efSmrg 
225610d565efSmrg /* Update the REG_BR_PROB note of BB's final jump, if one is present.  */
225710d565efSmrg 
225810d565efSmrg void
225910d565efSmrg update_br_prob_note (basic_block bb)
226010d565efSmrg {
226110d565efSmrg   rtx note;
226210d565efSmrg   note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2263c7a68eb7Smrg   if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2264c7a68eb7Smrg     {
2265c7a68eb7Smrg       if (note)
2266c7a68eb7Smrg 	{
2267c7a68eb7Smrg 	  rtx *note_link, this_rtx;
2268c7a68eb7Smrg 
2269c7a68eb7Smrg 	  note_link = &REG_NOTES (BB_END (bb));
2270c7a68eb7Smrg 	  for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2271c7a68eb7Smrg 	    if (this_rtx == note)
2272c7a68eb7Smrg 	      {
2273c7a68eb7Smrg 		*note_link = XEXP (this_rtx, 1);
2274c7a68eb7Smrg 		break;
2275c7a68eb7Smrg 	      }
2276c7a68eb7Smrg 	}
227710d565efSmrg       return;
2278c7a68eb7Smrg     }
2279c7a68eb7Smrg   if (!note
2280c7a68eb7Smrg       || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2281c7a68eb7Smrg     return;
2282c7a68eb7Smrg   XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
228310d565efSmrg }
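
/* After the update above, the note (when kept) satisfies

       XINT (note, 0)
	 == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()

   i.e. the REG_BR_PROB note mirrors the cfg probability of the branch
   edge; an uninitialized probability removes the note instead.  */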
228410d565efSmrg 
228510d565efSmrg /* Get the last insn associated with block BB (that includes barriers and
228610d565efSmrg    tablejumps after BB).  */
228710d565efSmrg rtx_insn *
228810d565efSmrg get_last_bb_insn (basic_block bb)
228910d565efSmrg {
229010d565efSmrg   rtx_jump_table_data *table;
229110d565efSmrg   rtx_insn *tmp;
229210d565efSmrg   rtx_insn *end = BB_END (bb);
229310d565efSmrg 
229410d565efSmrg   /* Include any jump table following the basic block.  */
229510d565efSmrg   if (tablejump_p (end, NULL, &table))
229610d565efSmrg     end = table;
229710d565efSmrg 
229810d565efSmrg   /* Include any barriers that may follow the basic block.  */
2299c7a68eb7Smrg   tmp = next_nonnote_nondebug_insn_bb (end);
230010d565efSmrg   while (tmp && BARRIER_P (tmp))
230110d565efSmrg     {
230210d565efSmrg       end = tmp;
2303c7a68eb7Smrg       tmp = next_nonnote_nondebug_insn_bb (end);
230410d565efSmrg     }
230510d565efSmrg 
230610d565efSmrg   return end;
230710d565efSmrg }
230810d565efSmrg 
2309c7a68eb7Smrg /* Add all BBs reachable from entry via hot paths into the SET.  */
2310c7a68eb7Smrg 
2311c7a68eb7Smrg void
2312c7a68eb7Smrg find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2313c7a68eb7Smrg {
2314c7a68eb7Smrg   auto_vec<basic_block, 64> worklist;
2315c7a68eb7Smrg 
2316c7a68eb7Smrg   set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2317c7a68eb7Smrg   worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2318c7a68eb7Smrg 
2319c7a68eb7Smrg   while (worklist.length () > 0)
2320c7a68eb7Smrg     {
2321c7a68eb7Smrg       basic_block bb = worklist.pop ();
2322c7a68eb7Smrg       edge_iterator ei;
2323c7a68eb7Smrg       edge e;
2324c7a68eb7Smrg 
2325c7a68eb7Smrg       FOR_EACH_EDGE (e, ei, bb->succs)
2326c7a68eb7Smrg 	if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2327c7a68eb7Smrg 	    && !set->add (e->dest))
2328c7a68eb7Smrg 	  worklist.safe_push (e->dest);
2329c7a68eb7Smrg     }
2330c7a68eb7Smrg }
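
/* Usage sketch (mirroring find_partition_fixes below):

       hash_set<basic_block> hot_set;
       find_bbs_reachable_by_hot_paths (&hot_set);
       if (!hot_set.contains (bb))
	 ... BB is reachable only through the cold partition ...  */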
2331c7a68eb7Smrg 
233210d565efSmrg /* Sanity check partition hotness to ensure that basic blocks in
233310d565efSmrg    the cold partition don't dominate basic blocks in the hot partition.
233410d565efSmrg    If FLAG_ONLY is true, report violations as errors. Otherwise
233510d565efSmrg    re-mark the dominated blocks as cold, since this is run after
233610d565efSmrg    cfg optimizations that may make hot blocks previously reached
233710d565efSmrg    by both hot and cold blocks now only reachable along cold paths.  */
233810d565efSmrg 
233910d565efSmrg static vec<basic_block>
234010d565efSmrg find_partition_fixes (bool flag_only)
234110d565efSmrg {
234210d565efSmrg   basic_block bb;
234310d565efSmrg   vec<basic_block> bbs_in_cold_partition = vNULL;
234410d565efSmrg   vec<basic_block> bbs_to_fix = vNULL;
2345c7a68eb7Smrg   hash_set<basic_block> set;
234610d565efSmrg 
234710d565efSmrg   /* Callers check this.  */
234810d565efSmrg   gcc_checking_assert (crtl->has_bb_partition);
234910d565efSmrg 
2350c7a68eb7Smrg   find_bbs_reachable_by_hot_paths (&set);
2351c7a68eb7Smrg 
235210d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
2353c7a68eb7Smrg     if (!set.contains (bb)
2354c7a68eb7Smrg 	&& BB_PARTITION (bb) != BB_COLD_PARTITION)
235510d565efSmrg       {
235610d565efSmrg 	if (flag_only)
2357c7a68eb7Smrg 	  error ("non-cold basic block %d reachable only "
2358c7a68eb7Smrg 		 "by paths crossing the cold partition", bb->index);
235910d565efSmrg 	else
2360c7a68eb7Smrg 	  BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2361c7a68eb7Smrg 	bbs_to_fix.safe_push (bb);
2362c7a68eb7Smrg 	bbs_in_cold_partition.safe_push (bb);
236310d565efSmrg       }
236410d565efSmrg 
236510d565efSmrg   return bbs_to_fix;
236610d565efSmrg }
236710d565efSmrg 
236810d565efSmrg /* Perform cleanup on the hot/cold bb partitioning after optimization
236910d565efSmrg    passes that modify the cfg.  */
237010d565efSmrg 
237110d565efSmrg void
237210d565efSmrg fixup_partitions (void)
237310d565efSmrg {
237410d565efSmrg   basic_block bb;
237510d565efSmrg 
237610d565efSmrg   if (!crtl->has_bb_partition)
237710d565efSmrg     return;
237810d565efSmrg 
237910d565efSmrg   /* Delete any blocks that became unreachable and weren't
238010d565efSmrg      already cleaned up, for example during edge forwarding
238110d565efSmrg      and convert_jumps_to_returns. This will expose more
238210d565efSmrg      opportunities for fixing the partition boundaries here.
238310d565efSmrg      Also, the calculation of the dominance graph during verification
238410d565efSmrg      will assert if there are unreachable nodes.  */
238510d565efSmrg   delete_unreachable_blocks ();
238610d565efSmrg 
238710d565efSmrg   /* If there are partitions, do a sanity check on them: A basic block in
238810d565efSmrg      a cold partition cannot dominate a basic block in a hot partition.
238910d565efSmrg      Fixup any that now violate this requirement, as a result of edge
239010d565efSmrg      forwarding and unreachable block deletion.  */
239110d565efSmrg   vec<basic_block> bbs_to_fix = find_partition_fixes (false);
239210d565efSmrg 
239310d565efSmrg   /* Do the partition fixup after all necessary blocks have been converted to
239410d565efSmrg      cold, so that we only update the region crossings the minimum number of
239510d565efSmrg      places, which can require forcing edges to be non fallthru.  */
239610d565efSmrg   while (! bbs_to_fix.is_empty ())
239710d565efSmrg     {
239810d565efSmrg       bb = bbs_to_fix.pop ();
239910d565efSmrg       fixup_new_cold_bb (bb);
240010d565efSmrg     }
240110d565efSmrg }
240210d565efSmrg 
240310d565efSmrg /* Verify, in the basic block chain, that there is at most one switch
240410d565efSmrg    between hot/cold partitions. This condition will not be true until
240510d565efSmrg    after reorder_basic_blocks is called.  */
240610d565efSmrg 
240710d565efSmrg static int
240810d565efSmrg verify_hot_cold_block_grouping (void)
240910d565efSmrg {
241010d565efSmrg   basic_block bb;
241110d565efSmrg   int err = 0;
241210d565efSmrg   bool switched_sections = false;
241310d565efSmrg   int current_partition = BB_UNPARTITIONED;
241410d565efSmrg 
241510d565efSmrg   /* Even after bb reordering is complete, we go into cfglayout mode
241610d565efSmrg      again (in compgoto). Ensure we don't call this before going back
241710d565efSmrg      into linearized RTL when any layout fixes would have been committed.  */
241810d565efSmrg   if (!crtl->bb_reorder_complete
241910d565efSmrg       || current_ir_type () != IR_RTL_CFGRTL)
242010d565efSmrg     return err;
242110d565efSmrg 
242210d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
242310d565efSmrg     {
242410d565efSmrg       if (current_partition != BB_UNPARTITIONED
242510d565efSmrg           && BB_PARTITION (bb) != current_partition)
242610d565efSmrg 	{
242710d565efSmrg 	  if (switched_sections)
242810d565efSmrg 	    {
242910d565efSmrg 	      error ("multiple hot/cold transitions found (bb %i)",
243010d565efSmrg 		     bb->index);
243110d565efSmrg 	      err = 1;
243210d565efSmrg 	    }
243310d565efSmrg 	  else
243410d565efSmrg             switched_sections = true;
243510d565efSmrg 
243610d565efSmrg           if (!crtl->has_bb_partition)
243710d565efSmrg             error ("partition found but function partition flag not set");
243810d565efSmrg 	}
243910d565efSmrg       current_partition = BB_PARTITION (bb);
244010d565efSmrg     }
244110d565efSmrg 
244210d565efSmrg   return err;
244310d565efSmrg }
244410d565efSmrg 
244510d565efSmrg 
244610d565efSmrg /* Perform several checks on the edges out of each block, such as
244710d565efSmrg    the consistency of the branch probabilities, the correctness
244810d565efSmrg    of hot/cold partition crossing edges, and the number of expected
244910d565efSmrg    successor edges.  Also verify that the dominance relationship
245010d565efSmrg    between hot/cold blocks is sane.  */
245110d565efSmrg 
245210d565efSmrg static int
245310d565efSmrg rtl_verify_edges (void)
245410d565efSmrg {
245510d565efSmrg   int err = 0;
245610d565efSmrg   basic_block bb;
245710d565efSmrg 
245810d565efSmrg   FOR_EACH_BB_REVERSE_FN (bb, cfun)
245910d565efSmrg     {
246010d565efSmrg       int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
246110d565efSmrg       int n_eh = 0, n_abnormal = 0;
246210d565efSmrg       edge e, fallthru = NULL;
246310d565efSmrg       edge_iterator ei;
246410d565efSmrg       rtx note;
246510d565efSmrg       bool has_crossing_edge = false;
246610d565efSmrg 
246710d565efSmrg       if (JUMP_P (BB_END (bb))
246810d565efSmrg 	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
246910d565efSmrg 	  && EDGE_COUNT (bb->succs) >= 2
247010d565efSmrg 	  && any_condjump_p (BB_END (bb)))
247110d565efSmrg 	{
2472c7a68eb7Smrg 	  if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2473c7a68eb7Smrg 	    {
2474c7a68eb7Smrg 	      if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
2475c7a68eb7Smrg 		{
2476c7a68eb7Smrg 		  error ("verify_flow_info: "
2477c7a68eb7Smrg 			 "REG_BR_PROB is set but cfg probability is not");
2478c7a68eb7Smrg 		  err = 1;
2479c7a68eb7Smrg 		}
2480c7a68eb7Smrg 	    }
2481c7a68eb7Smrg 	  else if (XINT (note, 0)
2482c7a68eb7Smrg 	           != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
248310d565efSmrg 	           && profile_status_for_fn (cfun) != PROFILE_ABSENT)
248410d565efSmrg 	    {
248510d565efSmrg 	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2486c7a68eb7Smrg 		     XINT (note, 0),
2487c7a68eb7Smrg 		     BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
248810d565efSmrg 	      err = 1;
248910d565efSmrg 	    }
249010d565efSmrg 	}
249110d565efSmrg 
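      /* Classify each outgoing edge, counting the fallthru, branch, EH,
	 sibcall and abnormal edges so the totals can be checked against
	 the block's final insn below.  */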
249210d565efSmrg       FOR_EACH_EDGE (e, ei, bb->succs)
249310d565efSmrg 	{
249410d565efSmrg 	  bool is_crossing;
249510d565efSmrg 
249610d565efSmrg 	  if (e->flags & EDGE_FALLTHRU)
249710d565efSmrg 	    n_fallthru++, fallthru = e;
249810d565efSmrg 
249910d565efSmrg 	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
250010d565efSmrg 			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
250110d565efSmrg 			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
250210d565efSmrg           has_crossing_edge |= is_crossing;
250310d565efSmrg 	  if (e->flags & EDGE_CROSSING)
250410d565efSmrg 	    {
250510d565efSmrg 	      if (!is_crossing)
250610d565efSmrg 		{
250710d565efSmrg 		  error ("EDGE_CROSSING incorrectly set across same section");
250810d565efSmrg 		  err = 1;
250910d565efSmrg 		}
251010d565efSmrg 	      if (e->flags & EDGE_FALLTHRU)
251110d565efSmrg 		{
251210d565efSmrg 		  error ("fallthru edge crosses section boundary in bb %i",
251310d565efSmrg 			 e->src->index);
251410d565efSmrg 		  err = 1;
251510d565efSmrg 		}
251610d565efSmrg 	      if (e->flags & EDGE_EH)
251710d565efSmrg 		{
251810d565efSmrg 		  error ("EH edge crosses section boundary in bb %i",
251910d565efSmrg 			 e->src->index);
252010d565efSmrg 		  err = 1;
252110d565efSmrg 		}
252210d565efSmrg               if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
252310d565efSmrg 		{
252410d565efSmrg 		  error ("No region crossing jump at section boundary in bb %i",
252510d565efSmrg 			 bb->index);
252610d565efSmrg 		  err = 1;
252710d565efSmrg 		}
252810d565efSmrg 	    }
252910d565efSmrg 	  else if (is_crossing)
253010d565efSmrg 	    {
253110d565efSmrg 	      error ("EDGE_CROSSING missing across section boundary");
253210d565efSmrg 	      err = 1;
253310d565efSmrg 	    }
253410d565efSmrg 
253510d565efSmrg 	  if ((e->flags & ~(EDGE_DFS_BACK
253610d565efSmrg 			    | EDGE_CAN_FALLTHRU
253710d565efSmrg 			    | EDGE_IRREDUCIBLE_LOOP
253810d565efSmrg 			    | EDGE_LOOP_EXIT
253910d565efSmrg 			    | EDGE_CROSSING
254010d565efSmrg 			    | EDGE_PRESERVE)) == 0)
254110d565efSmrg 	    n_branch++;
254210d565efSmrg 
254310d565efSmrg 	  if (e->flags & EDGE_ABNORMAL_CALL)
254410d565efSmrg 	    n_abnormal_call++;
254510d565efSmrg 
254610d565efSmrg 	  if (e->flags & EDGE_SIBCALL)
254710d565efSmrg 	    n_sibcall++;
254810d565efSmrg 
254910d565efSmrg 	  if (e->flags & EDGE_EH)
255010d565efSmrg 	    n_eh++;
255110d565efSmrg 
255210d565efSmrg 	  if (e->flags & EDGE_ABNORMAL)
255310d565efSmrg 	    n_abnormal++;
255410d565efSmrg 	}
255510d565efSmrg 
255610d565efSmrg       if (!has_crossing_edge
255710d565efSmrg 	  && JUMP_P (BB_END (bb))
255810d565efSmrg 	  && CROSSING_JUMP_P (BB_END (bb)))
255910d565efSmrg 	{
2560c7a68eb7Smrg 	  print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
256110d565efSmrg 	  error ("Region crossing jump across same section in bb %i",
256210d565efSmrg 		 bb->index);
256310d565efSmrg 	  err = 1;
256410d565efSmrg 	}
256510d565efSmrg 
256610d565efSmrg       if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
256710d565efSmrg 	{
256810d565efSmrg 	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
256910d565efSmrg 	  err = 1;
257010d565efSmrg 	}
257110d565efSmrg       if (n_eh > 1)
257210d565efSmrg 	{
257310d565efSmrg 	  error ("too many exception handling edges in bb %i", bb->index);
257410d565efSmrg 	  err = 1;
257510d565efSmrg 	}
257610d565efSmrg       if (n_branch
257710d565efSmrg 	  && (!JUMP_P (BB_END (bb))
257810d565efSmrg 	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
257910d565efSmrg 				   || any_condjump_p (BB_END (bb))))))
258010d565efSmrg 	{
258110d565efSmrg 	  error ("too many outgoing branch edges from bb %i", bb->index);
258210d565efSmrg 	  err = 1;
258310d565efSmrg 	}
258410d565efSmrg       if (n_fallthru && any_uncondjump_p (BB_END (bb)))
258510d565efSmrg 	{
258610d565efSmrg 	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
258710d565efSmrg 	  err = 1;
258810d565efSmrg 	}
258910d565efSmrg       if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
259010d565efSmrg 	{
259110d565efSmrg 	  error ("wrong number of branch edges after unconditional jump"
259210d565efSmrg 		 " in bb %i", bb->index);
259310d565efSmrg 	  err = 1;
259410d565efSmrg 	}
259510d565efSmrg       if (n_branch != 1 && any_condjump_p (BB_END (bb))
259610d565efSmrg 	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
259710d565efSmrg 	{
	  error ("wrong number of branch edges after conditional jump"
		 " in bb %i", bb->index);
260010d565efSmrg 	  err = 1;
260110d565efSmrg 	}
260210d565efSmrg       if (n_abnormal_call && !CALL_P (BB_END (bb)))
260310d565efSmrg 	{
260410d565efSmrg 	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
260510d565efSmrg 	  err = 1;
260610d565efSmrg 	}
260710d565efSmrg       if (n_sibcall && !CALL_P (BB_END (bb)))
260810d565efSmrg 	{
260910d565efSmrg 	  error ("sibcall edges for non-call insn in bb %i", bb->index);
261010d565efSmrg 	  err = 1;
261110d565efSmrg 	}
261210d565efSmrg       if (n_abnormal > n_eh
261310d565efSmrg 	  && !(CALL_P (BB_END (bb))
261410d565efSmrg 	       && n_abnormal == n_abnormal_call + n_sibcall)
261510d565efSmrg 	  && (!JUMP_P (BB_END (bb))
261610d565efSmrg 	      || any_condjump_p (BB_END (bb))
261710d565efSmrg 	      || any_uncondjump_p (BB_END (bb))))
261810d565efSmrg 	{
261910d565efSmrg 	  error ("abnormal edges for no purpose in bb %i", bb->index);
262010d565efSmrg 	  err = 1;
262110d565efSmrg 	}
2622*0fc04c29Smrg 
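      /* Incoming EH edges must not be mixed with normal incoming edges:
	 an EH landing pad block is entered only through EH edges.  */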
2623*0fc04c29Smrg       int has_eh = -1;
2624*0fc04c29Smrg       FOR_EACH_EDGE (e, ei, bb->preds)
2625*0fc04c29Smrg 	{
2626*0fc04c29Smrg 	  if (has_eh == -1)
2627*0fc04c29Smrg 	    has_eh = (e->flags & EDGE_EH);
2628*0fc04c29Smrg 	  if ((e->flags & EDGE_EH) == has_eh)
2629*0fc04c29Smrg 	    continue;
2630*0fc04c29Smrg 	  error ("EH incoming edge mixed with non-EH incoming edges "
2631*0fc04c29Smrg 		 "in bb %i", bb->index);
2632*0fc04c29Smrg 	  err = 1;
2633*0fc04c29Smrg 	  break;
2634*0fc04c29Smrg 	}
263510d565efSmrg     }
263610d565efSmrg 
263710d565efSmrg   /* If there are partitions, do a sanity check on them: A basic block in
263810d565efSmrg      a cold partition cannot dominate a basic block in a hot partition.  */
2639c7a68eb7Smrg   if (crtl->has_bb_partition && !err
2640c7a68eb7Smrg       && current_ir_type () == IR_RTL_CFGLAYOUT)
264110d565efSmrg     {
264210d565efSmrg       vec<basic_block> bbs_to_fix = find_partition_fixes (true);
264310d565efSmrg       err = !bbs_to_fix.is_empty ();
264410d565efSmrg     }
264510d565efSmrg 
264610d565efSmrg   /* Clean up.  */
264710d565efSmrg   return err;
264810d565efSmrg }
264910d565efSmrg 
265010d565efSmrg /* Checks on the instructions within blocks. Currently checks that each
265110d565efSmrg    block starts with a basic block note, and that basic block notes and
265210d565efSmrg    control flow jumps are not found in the middle of the block.  */
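/* For illustration, a hypothetical well-formed block body looks like

     code_label 23          <- optional; must be BB_HEAD if present
     note [bb 4] NOTE_INSN_BASIC_BLOCK
     insn ...
     jump_insn ...          <- a control flow insn may only be BB_END.  */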
265310d565efSmrg 
265410d565efSmrg static int
265510d565efSmrg rtl_verify_bb_insns (void)
265610d565efSmrg {
265710d565efSmrg   rtx_insn *x;
265810d565efSmrg   int err = 0;
265910d565efSmrg   basic_block bb;
266010d565efSmrg 
266110d565efSmrg   FOR_EACH_BB_REVERSE_FN (bb, cfun)
266210d565efSmrg     {
      /* Now check the header of the basic block.  It ought to contain
	 an optional CODE_LABEL followed by NOTE_BASIC_BLOCK.  */
266610d565efSmrg       x = BB_HEAD (bb);
266710d565efSmrg       if (LABEL_P (x))
266810d565efSmrg 	{
266910d565efSmrg 	  if (BB_END (bb) == x)
267010d565efSmrg 	    {
267110d565efSmrg 	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
267210d565efSmrg 		     bb->index);
267310d565efSmrg 	      err = 1;
267410d565efSmrg 	    }
267510d565efSmrg 
267610d565efSmrg 	  x = NEXT_INSN (x);
267710d565efSmrg 	}
267810d565efSmrg 
267910d565efSmrg       if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
268010d565efSmrg 	{
268110d565efSmrg 	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
268210d565efSmrg 		 bb->index);
268310d565efSmrg 	  err = 1;
268410d565efSmrg 	}
268510d565efSmrg 
268610d565efSmrg       if (BB_END (bb) == x)
268710d565efSmrg 	/* Do checks for empty blocks here.  */
268810d565efSmrg 	;
268910d565efSmrg       else
269010d565efSmrg 	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
269110d565efSmrg 	  {
269210d565efSmrg 	    if (NOTE_INSN_BASIC_BLOCK_P (x))
269310d565efSmrg 	      {
269410d565efSmrg 		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
269510d565efSmrg 		       INSN_UID (x), bb->index);
269610d565efSmrg 		err = 1;
269710d565efSmrg 	      }
269810d565efSmrg 
269910d565efSmrg 	    if (x == BB_END (bb))
270010d565efSmrg 	      break;
270110d565efSmrg 
270210d565efSmrg 	    if (control_flow_insn_p (x))
270310d565efSmrg 	      {
270410d565efSmrg 		error ("in basic block %d:", bb->index);
270510d565efSmrg 		fatal_insn ("flow control insn inside a basic block", x);
270610d565efSmrg 	      }
270710d565efSmrg 	  }
270810d565efSmrg     }
270910d565efSmrg 
271010d565efSmrg   /* Clean up.  */
271110d565efSmrg   return err;
271210d565efSmrg }
271310d565efSmrg 
271410d565efSmrg /* Verify that block pointers for instructions in basic blocks, headers and
271510d565efSmrg    footers are set appropriately.  */
271610d565efSmrg 
271710d565efSmrg static int
271810d565efSmrg rtl_verify_bb_pointers (void)
271910d565efSmrg {
272010d565efSmrg   int err = 0;
272110d565efSmrg   basic_block bb;
272210d565efSmrg 
272310d565efSmrg   /* Check the general integrity of the basic blocks.  */
272410d565efSmrg   FOR_EACH_BB_REVERSE_FN (bb, cfun)
272510d565efSmrg     {
272610d565efSmrg       rtx_insn *insn;
272710d565efSmrg 
272810d565efSmrg       if (!(bb->flags & BB_RTL))
272910d565efSmrg 	{
273010d565efSmrg 	  error ("BB_RTL flag not set for block %d", bb->index);
273110d565efSmrg 	  err = 1;
273210d565efSmrg 	}
273310d565efSmrg 
273410d565efSmrg       FOR_BB_INSNS (bb, insn)
273510d565efSmrg 	if (BLOCK_FOR_INSN (insn) != bb)
273610d565efSmrg 	  {
273710d565efSmrg 	    error ("insn %d basic block pointer is %d, should be %d",
273810d565efSmrg 		   INSN_UID (insn),
273910d565efSmrg 		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
274010d565efSmrg 		   bb->index);
274110d565efSmrg 	    err = 1;
274210d565efSmrg 	  }
274310d565efSmrg 
274410d565efSmrg       for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
274510d565efSmrg 	if (!BARRIER_P (insn)
274610d565efSmrg 	    && BLOCK_FOR_INSN (insn) != NULL)
274710d565efSmrg 	  {
274810d565efSmrg 	    error ("insn %d in header of bb %d has non-NULL basic block",
274910d565efSmrg 		   INSN_UID (insn), bb->index);
275010d565efSmrg 	    err = 1;
275110d565efSmrg 	  }
275210d565efSmrg       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
275310d565efSmrg 	if (!BARRIER_P (insn)
275410d565efSmrg 	    && BLOCK_FOR_INSN (insn) != NULL)
275510d565efSmrg 	  {
275610d565efSmrg 	    error ("insn %d in footer of bb %d has non-NULL basic block",
275710d565efSmrg 		   INSN_UID (insn), bb->index);
275810d565efSmrg 	    err = 1;
275910d565efSmrg 	  }
276010d565efSmrg     }
276110d565efSmrg 
276210d565efSmrg   /* Clean up.  */
276310d565efSmrg   return err;
276410d565efSmrg }
276510d565efSmrg 
276610d565efSmrg /* Verify the CFG and RTL consistency common for both underlying RTL and
276710d565efSmrg    cfglayout RTL.
276810d565efSmrg 
   Currently it does the following checks:
277010d565efSmrg 
277110d565efSmrg    - overlapping of basic blocks
277210d565efSmrg    - insns with wrong BLOCK_FOR_INSN pointers
277310d565efSmrg    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
277410d565efSmrg    - tails of basic blocks (ensure that boundary is necessary)
   - scan the body of the basic block for JUMP_INSN, CODE_LABEL
277610d565efSmrg      and NOTE_INSN_BASIC_BLOCK
277710d565efSmrg    - verify that no fall_thru edge crosses hot/cold partition boundaries
277810d565efSmrg    - verify that there are no pending RTL branch predictions
277910d565efSmrg    - verify that hot blocks are not dominated by cold blocks
278010d565efSmrg 
   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc.).  */
278310d565efSmrg 
278410d565efSmrg static int
278510d565efSmrg rtl_verify_flow_info_1 (void)
278610d565efSmrg {
278710d565efSmrg   int err = 0;
278810d565efSmrg 
278910d565efSmrg   err |= rtl_verify_bb_pointers ();
279010d565efSmrg 
279110d565efSmrg   err |= rtl_verify_bb_insns ();
279210d565efSmrg 
279310d565efSmrg   err |= rtl_verify_edges ();
279410d565efSmrg 
279510d565efSmrg   return err;
279610d565efSmrg }
279710d565efSmrg 
279810d565efSmrg /* Walk the instruction chain and verify that bb head/end pointers
279910d565efSmrg   are correct, and that instructions are in exactly one bb and have
280010d565efSmrg   correct block pointers.  */
280110d565efSmrg 
280210d565efSmrg static int
280310d565efSmrg rtl_verify_bb_insn_chain (void)
280410d565efSmrg {
280510d565efSmrg   basic_block bb;
280610d565efSmrg   int err = 0;
280710d565efSmrg   rtx_insn *x;
280810d565efSmrg   rtx_insn *last_head = get_last_insn ();
280910d565efSmrg   basic_block *bb_info;
281010d565efSmrg   const int max_uid = get_max_uid ();
281110d565efSmrg 
281210d565efSmrg   bb_info = XCNEWVEC (basic_block, max_uid);
281310d565efSmrg 
281410d565efSmrg   FOR_EACH_BB_REVERSE_FN (bb, cfun)
281510d565efSmrg     {
281610d565efSmrg       rtx_insn *head = BB_HEAD (bb);
281710d565efSmrg       rtx_insn *end = BB_END (bb);
281810d565efSmrg 
281910d565efSmrg       for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
282010d565efSmrg 	{
282110d565efSmrg 	  /* Verify the end of the basic block is in the INSN chain.  */
282210d565efSmrg 	  if (x == end)
282310d565efSmrg 	    break;
282410d565efSmrg 
	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
283310d565efSmrg 	}
283410d565efSmrg 
283510d565efSmrg       if (!x)
283610d565efSmrg 	{
283710d565efSmrg 	  error ("end insn %d for block %d not found in the insn stream",
283810d565efSmrg 		 INSN_UID (end), bb->index);
283910d565efSmrg 	  err = 1;
284010d565efSmrg 	}
284110d565efSmrg 
284210d565efSmrg       /* Work backwards from the end to the head of the basic block
284310d565efSmrg 	 to verify the head is in the RTL chain.  */
284410d565efSmrg       for (; x != NULL_RTX; x = PREV_INSN (x))
284510d565efSmrg 	{
284610d565efSmrg 	  /* While walking over the insn chain, verify insns appear
284710d565efSmrg 	     in only one basic block.  */
284810d565efSmrg 	  if (bb_info[INSN_UID (x)] != NULL)
284910d565efSmrg 	    {
285010d565efSmrg 	      error ("insn %d is in multiple basic blocks (%d and %d)",
285110d565efSmrg 		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
285210d565efSmrg 	      err = 1;
285310d565efSmrg 	    }
285410d565efSmrg 
285510d565efSmrg 	  bb_info[INSN_UID (x)] = bb;
285610d565efSmrg 
285710d565efSmrg 	  if (x == head)
285810d565efSmrg 	    break;
285910d565efSmrg 	}
286010d565efSmrg       if (!x)
286110d565efSmrg 	{
286210d565efSmrg 	  error ("head insn %d for block %d not found in the insn stream",
286310d565efSmrg 		 INSN_UID (head), bb->index);
286410d565efSmrg 	  err = 1;
286510d565efSmrg 	}
286610d565efSmrg 
286710d565efSmrg       last_head = PREV_INSN (x);
286810d565efSmrg     }
286910d565efSmrg 
287010d565efSmrg   for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
287110d565efSmrg     {
287210d565efSmrg       /* Check that the code before the first basic block has NULL
287310d565efSmrg 	 bb field.  */
287410d565efSmrg       if (!BARRIER_P (x)
287510d565efSmrg 	  && BLOCK_FOR_INSN (x) != NULL)
287610d565efSmrg 	{
287710d565efSmrg 	  error ("insn %d outside of basic blocks has non-NULL bb field",
287810d565efSmrg 		 INSN_UID (x));
287910d565efSmrg 	  err = 1;
288010d565efSmrg 	}
288110d565efSmrg     }
288210d565efSmrg   free (bb_info);
288310d565efSmrg 
288410d565efSmrg   return err;
288510d565efSmrg }
288610d565efSmrg 
288710d565efSmrg /* Verify that fallthru edges point to adjacent blocks in layout order and
288810d565efSmrg    that barriers exist after non-fallthru blocks.  */
288910d565efSmrg 
289010d565efSmrg static int
289110d565efSmrg rtl_verify_fallthru (void)
289210d565efSmrg {
289310d565efSmrg   basic_block bb;
289410d565efSmrg   int err = 0;
289510d565efSmrg 
289610d565efSmrg   FOR_EACH_BB_REVERSE_FN (bb, cfun)
289710d565efSmrg     {
289810d565efSmrg       edge e;
289910d565efSmrg 
290010d565efSmrg       e = find_fallthru_edge (bb->succs);
290110d565efSmrg       if (!e)
290210d565efSmrg 	{
290310d565efSmrg 	  rtx_insn *insn;
290410d565efSmrg 
	  /* Ensure the existence of a barrier in a BB with no fallthru
	     edges.  */
290610d565efSmrg 	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
290710d565efSmrg 	    {
290810d565efSmrg 	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
290910d565efSmrg 		{
291010d565efSmrg 		  error ("missing barrier after block %i", bb->index);
291110d565efSmrg 		  err = 1;
291210d565efSmrg 		  break;
291310d565efSmrg 		}
291410d565efSmrg 	      if (BARRIER_P (insn))
291510d565efSmrg 		break;
291610d565efSmrg 	    }
291710d565efSmrg 	}
291810d565efSmrg       else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
291910d565efSmrg 	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
292010d565efSmrg 	{
292110d565efSmrg 	  rtx_insn *insn;
292210d565efSmrg 
292310d565efSmrg 	  if (e->src->next_bb != e->dest)
292410d565efSmrg 	    {
292510d565efSmrg 	      error
292610d565efSmrg 		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
292710d565efSmrg 		 e->src->index, e->dest->index);
292810d565efSmrg 	      err = 1;
292910d565efSmrg 	    }
293010d565efSmrg 	  else
293110d565efSmrg 	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
293210d565efSmrg 		 insn = NEXT_INSN (insn))
2933c7a68eb7Smrg 	      if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
293410d565efSmrg 		{
293510d565efSmrg 		  error ("verify_flow_info: Incorrect fallthru %i->%i",
293610d565efSmrg 			 e->src->index, e->dest->index);
293710d565efSmrg 		  fatal_insn ("wrong insn in the fallthru edge", insn);
293810d565efSmrg 		  err = 1;
293910d565efSmrg 		}
294010d565efSmrg 	}
294110d565efSmrg     }
294210d565efSmrg 
  return err;
294410d565efSmrg }
294510d565efSmrg 
294610d565efSmrg /* Verify that blocks are laid out in consecutive order. While walking the
294710d565efSmrg    instructions, verify that all expected instructions are inside the basic
294810d565efSmrg    blocks, and that all returns are followed by barriers.  */
294910d565efSmrg 
295010d565efSmrg static int
295110d565efSmrg rtl_verify_bb_layout (void)
295210d565efSmrg {
295310d565efSmrg   basic_block bb;
295410d565efSmrg   int err = 0;
2955c7a68eb7Smrg   rtx_insn *x, *y;
295610d565efSmrg   int num_bb_notes;
295710d565efSmrg   rtx_insn * const rtx_first = get_insns ();
295810d565efSmrg   basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
295910d565efSmrg 
296010d565efSmrg   num_bb_notes = 0;
296110d565efSmrg   last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
296210d565efSmrg 
296310d565efSmrg   for (x = rtx_first; x; x = NEXT_INSN (x))
296410d565efSmrg     {
296510d565efSmrg       if (NOTE_INSN_BASIC_BLOCK_P (x))
296610d565efSmrg 	{
296710d565efSmrg 	  bb = NOTE_BASIC_BLOCK (x);
296810d565efSmrg 
296910d565efSmrg 	  num_bb_notes++;
297010d565efSmrg 	  if (bb != last_bb_seen->next_bb)
297110d565efSmrg 	    internal_error ("basic blocks not laid down consecutively");
297210d565efSmrg 
297310d565efSmrg 	  curr_bb = last_bb_seen = bb;
297410d565efSmrg 	}
297510d565efSmrg 
297610d565efSmrg       if (!curr_bb)
297710d565efSmrg 	{
297810d565efSmrg 	  switch (GET_CODE (x))
297910d565efSmrg 	    {
298010d565efSmrg 	    case BARRIER:
298110d565efSmrg 	    case NOTE:
298210d565efSmrg 	      break;
298310d565efSmrg 
298410d565efSmrg 	    case CODE_LABEL:
298510d565efSmrg 	      /* An ADDR_VEC is placed outside any basic block.  */
298610d565efSmrg 	      if (NEXT_INSN (x)
298710d565efSmrg 		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
298810d565efSmrg 		x = NEXT_INSN (x);
298910d565efSmrg 
299010d565efSmrg 	      /* But in any case, non-deletable labels can appear anywhere.  */
299110d565efSmrg 	      break;
299210d565efSmrg 
299310d565efSmrg 	    default:
299410d565efSmrg 	      fatal_insn ("insn outside basic block", x);
299510d565efSmrg 	    }
299610d565efSmrg 	}
299710d565efSmrg 
299810d565efSmrg       if (JUMP_P (x)
299910d565efSmrg 	  && returnjump_p (x) && ! condjump_p (x)
3000c7a68eb7Smrg 	  && ! ((y = next_nonnote_nondebug_insn (x))
3001c7a68eb7Smrg 		&& BARRIER_P (y)))
300210d565efSmrg 	    fatal_insn ("return not followed by barrier", x);
300310d565efSmrg 
300410d565efSmrg       if (curr_bb && x == BB_END (curr_bb))
300510d565efSmrg 	curr_bb = NULL;
300610d565efSmrg     }
300710d565efSmrg 
300810d565efSmrg   if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
300910d565efSmrg     internal_error
301010d565efSmrg       ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
301110d565efSmrg        num_bb_notes, n_basic_blocks_for_fn (cfun));
301210d565efSmrg 
  return err;
301410d565efSmrg }
301510d565efSmrg 
301610d565efSmrg /* Verify the CFG and RTL consistency common for both underlying RTL and
301710d565efSmrg    cfglayout RTL, plus consistency checks specific to linearized RTL mode.
301810d565efSmrg 
   Currently it does the following checks:
302010d565efSmrg    - all checks of rtl_verify_flow_info_1
302110d565efSmrg    - test head/end pointers
302210d565efSmrg    - check that blocks are laid out in consecutive order
302310d565efSmrg    - check that all insns are in the basic blocks
302410d565efSmrg      (except the switch handling code, barriers and notes)
302510d565efSmrg    - check that all returns are followed by barriers
   - check that every fallthru edge points to the adjacent block
302710d565efSmrg    - verify that there is a single hot/cold partition boundary after bbro  */
302810d565efSmrg 
302910d565efSmrg static int
303010d565efSmrg rtl_verify_flow_info (void)
303110d565efSmrg {
303210d565efSmrg   int err = 0;
303310d565efSmrg 
303410d565efSmrg   err |= rtl_verify_flow_info_1 ();
303510d565efSmrg 
303610d565efSmrg   err |= rtl_verify_bb_insn_chain ();
303710d565efSmrg 
303810d565efSmrg   err |= rtl_verify_fallthru ();
303910d565efSmrg 
304010d565efSmrg   err |= rtl_verify_bb_layout ();
304110d565efSmrg 
304210d565efSmrg   err |= verify_hot_cold_block_grouping ();
304310d565efSmrg 
304410d565efSmrg   return err;
304510d565efSmrg }
304610d565efSmrg 
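/* A note on typical use (a sketch; the flag_checking guard shown is the
   usual convention): rtl_verify_flow_info is not called directly by
   passes.  It is installed as the verify_flow_info hook of rtl_cfg_hooks,
   so a checking pass simply does

       if (flag_checking)
	 verify_flow_info ();

   and the RTL-specific checks above are dispatched through the current
   cfg hooks.  */
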
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the corresponding edges
   from the CFG.  Return true if any edges are eliminated.  */
305010d565efSmrg 
305110d565efSmrg bool
305210d565efSmrg purge_dead_edges (basic_block bb)
305310d565efSmrg {
305410d565efSmrg   edge e;
305510d565efSmrg   rtx_insn *insn = BB_END (bb);
305610d565efSmrg   rtx note;
305710d565efSmrg   bool purged = false;
305810d565efSmrg   bool found;
305910d565efSmrg   edge_iterator ei;
306010d565efSmrg 
306110d565efSmrg   if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
306210d565efSmrg     do
306310d565efSmrg       insn = PREV_INSN (insn);
306410d565efSmrg     while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
306510d565efSmrg 
306610d565efSmrg   /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
306710d565efSmrg   if (NONJUMP_INSN_P (insn)
306810d565efSmrg       && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
306910d565efSmrg     {
307010d565efSmrg       rtx eqnote;
307110d565efSmrg 
307210d565efSmrg       if (! may_trap_p (PATTERN (insn))
307310d565efSmrg 	  || ((eqnote = find_reg_equal_equiv_note (insn))
307410d565efSmrg 	      && ! may_trap_p (XEXP (eqnote, 0))))
307510d565efSmrg 	remove_note (insn, note);
307610d565efSmrg     }
307710d565efSmrg 
307810d565efSmrg   /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
307910d565efSmrg   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
308010d565efSmrg     {
308110d565efSmrg       bool remove = false;
308210d565efSmrg 
308310d565efSmrg       /* There are three types of edges we need to handle correctly here: EH
308410d565efSmrg 	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
308510d565efSmrg 	 latter can appear when nonlocal gotos are used.  */
308610d565efSmrg       if (e->flags & EDGE_ABNORMAL_CALL)
308710d565efSmrg 	{
308810d565efSmrg 	  if (!CALL_P (insn))
308910d565efSmrg 	    remove = true;
309010d565efSmrg 	  else if (can_nonlocal_goto (insn))
309110d565efSmrg 	    ;
309210d565efSmrg 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
309310d565efSmrg 	    ;
309410d565efSmrg 	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
309510d565efSmrg 	    ;
309610d565efSmrg 	  else
309710d565efSmrg 	    remove = true;
309810d565efSmrg 	}
309910d565efSmrg       else if (e->flags & EDGE_EH)
310010d565efSmrg 	remove = !can_throw_internal (insn);
310110d565efSmrg 
310210d565efSmrg       if (remove)
310310d565efSmrg 	{
310410d565efSmrg 	  remove_edge (e);
310510d565efSmrg 	  df_set_bb_dirty (bb);
310610d565efSmrg 	  purged = true;
310710d565efSmrg 	}
310810d565efSmrg       else
310910d565efSmrg 	ei_next (&ei);
311010d565efSmrg     }
311110d565efSmrg 
311210d565efSmrg   if (JUMP_P (insn))
311310d565efSmrg     {
311410d565efSmrg       rtx note;
311510d565efSmrg       edge b,f;
311610d565efSmrg       edge_iterator ei;
311710d565efSmrg 
      /* We care only about conditional jumps and simplejumps.  */
311910d565efSmrg       if (!any_condjump_p (insn)
312010d565efSmrg 	  && !returnjump_p (insn)
312110d565efSmrg 	  && !simplejump_p (insn))
312210d565efSmrg 	return purged;
312310d565efSmrg 
      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned a condjump into a simplejump.  */
312610d565efSmrg       if (simplejump_p (insn))
312710d565efSmrg 	{
312810d565efSmrg 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
312910d565efSmrg 	  if (note)
313010d565efSmrg 	    remove_note (insn, note);
313110d565efSmrg 	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
313210d565efSmrg 	    remove_note (insn, note);
313310d565efSmrg 	}
313410d565efSmrg 
313510d565efSmrg       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
313610d565efSmrg 	{
	  /* Avoid letting abnormal flags leak from computed jumps
	     turned into simplejumps.  */
313910d565efSmrg 
314010d565efSmrg 	  e->flags &= ~EDGE_ABNORMAL;
314110d565efSmrg 
314210d565efSmrg 	  /* See if this edge is one we should keep.  */
314310d565efSmrg 	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
314410d565efSmrg 	    /* A conditional jump can fall through into the next
314510d565efSmrg 	       block, so we should keep the edge.  */
314610d565efSmrg 	    {
314710d565efSmrg 	      ei_next (&ei);
314810d565efSmrg 	      continue;
314910d565efSmrg 	    }
315010d565efSmrg 	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
315110d565efSmrg 		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
315210d565efSmrg 	    /* If the destination block is the target of the jump,
315310d565efSmrg 	       keep the edge.  */
315410d565efSmrg 	    {
315510d565efSmrg 	      ei_next (&ei);
315610d565efSmrg 	      continue;
315710d565efSmrg 	    }
315810d565efSmrg 	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
315910d565efSmrg 		   && returnjump_p (insn))
316010d565efSmrg 	    /* If the destination block is the exit block, and this
316110d565efSmrg 	       instruction is a return, then keep the edge.  */
316210d565efSmrg 	    {
316310d565efSmrg 	      ei_next (&ei);
316410d565efSmrg 	      continue;
316510d565efSmrg 	    }
316610d565efSmrg 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
316710d565efSmrg 	    /* Keep the edges that correspond to exceptions thrown by
316810d565efSmrg 	       this instruction and rematerialize the EDGE_ABNORMAL
316910d565efSmrg 	       flag we just cleared above.  */
317010d565efSmrg 	    {
317110d565efSmrg 	      e->flags |= EDGE_ABNORMAL;
317210d565efSmrg 	      ei_next (&ei);
317310d565efSmrg 	      continue;
317410d565efSmrg 	    }
317510d565efSmrg 
317610d565efSmrg 	  /* We do not need this edge.  */
317710d565efSmrg 	  df_set_bb_dirty (bb);
317810d565efSmrg 	  purged = true;
317910d565efSmrg 	  remove_edge (e);
318010d565efSmrg 	}
318110d565efSmrg 
318210d565efSmrg       if (EDGE_COUNT (bb->succs) == 0 || !purged)
318310d565efSmrg 	return purged;
318410d565efSmrg 
318510d565efSmrg       if (dump_file)
318610d565efSmrg 	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
318710d565efSmrg 
318810d565efSmrg       if (!optimize)
318910d565efSmrg 	return purged;
319010d565efSmrg 
319110d565efSmrg       /* Redistribute probabilities.  */
319210d565efSmrg       if (single_succ_p (bb))
319310d565efSmrg 	{
3194c7a68eb7Smrg 	  single_succ_edge (bb)->probability = profile_probability::always ();
319510d565efSmrg 	}
319610d565efSmrg       else
319710d565efSmrg 	{
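	  /* Two successors remain (branch and fallthru); resynchronize
	     their probabilities from the jump's REG_BR_PROB note, if any.  */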
319810d565efSmrg 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
319910d565efSmrg 	  if (!note)
320010d565efSmrg 	    return purged;
320110d565efSmrg 
320210d565efSmrg 	  b = BRANCH_EDGE (bb);
320310d565efSmrg 	  f = FALLTHRU_EDGE (bb);
3204c7a68eb7Smrg 	  b->probability = profile_probability::from_reg_br_prob_note
3205c7a68eb7Smrg 					 (XINT (note, 0));
3206c7a68eb7Smrg 	  f->probability = b->probability.invert ();
320710d565efSmrg 	}
320810d565efSmrg 
320910d565efSmrg       return purged;
321010d565efSmrg     }
321110d565efSmrg   else if (CALL_P (insn) && SIBLING_CALL_P (insn))
321210d565efSmrg     {
321310d565efSmrg       /* First, there should not be any EH or ABCALL edges resulting
321410d565efSmrg 	 from non-local gotos and the like.  If there were, we shouldn't
321510d565efSmrg 	 have created the sibcall in the first place.  Second, there
321610d565efSmrg 	 should of course never have been a fallthru edge.  */
321710d565efSmrg       gcc_assert (single_succ_p (bb));
321810d565efSmrg       gcc_assert (single_succ_edge (bb)->flags
321910d565efSmrg 		  == (EDGE_SIBCALL | EDGE_ABNORMAL));
322010d565efSmrg 
      return false;
322210d565efSmrg     }
322310d565efSmrg 
322410d565efSmrg   /* If we don't see a jump insn, we don't know exactly why the block would
322510d565efSmrg      have been broken at this point.  Look for a simple, non-fallthru edge,
322610d565efSmrg      as these are only created by conditional branches.  If we find such an
322710d565efSmrg      edge we know that there used to be a jump here and can then safely
322810d565efSmrg      remove all non-fallthru edges.  */
322910d565efSmrg   found = false;
323010d565efSmrg   FOR_EACH_EDGE (e, ei, bb->succs)
323110d565efSmrg     if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
323210d565efSmrg       {
323310d565efSmrg 	found = true;
323410d565efSmrg 	break;
323510d565efSmrg       }
323610d565efSmrg 
323710d565efSmrg   if (!found)
323810d565efSmrg     return purged;
323910d565efSmrg 
324010d565efSmrg   /* Remove all but the fake and fallthru edges.  The fake edge may be
324110d565efSmrg      the only successor for this block in the case of noreturn
324210d565efSmrg      calls.  */
324310d565efSmrg   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
324410d565efSmrg     {
324510d565efSmrg       if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
324610d565efSmrg 	{
324710d565efSmrg 	  df_set_bb_dirty (bb);
324810d565efSmrg 	  remove_edge (e);
324910d565efSmrg 	  purged = true;
325010d565efSmrg 	}
325110d565efSmrg       else
325210d565efSmrg 	ei_next (&ei);
325310d565efSmrg     }
325410d565efSmrg 
325510d565efSmrg   gcc_assert (single_succ_p (bb));
325610d565efSmrg 
3257c7a68eb7Smrg   single_succ_edge (bb)->probability = profile_probability::always ();
325810d565efSmrg 
325910d565efSmrg   if (dump_file)
326010d565efSmrg     fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
326110d565efSmrg 	     bb->index);
326210d565efSmrg   return purged;
326310d565efSmrg }
326410d565efSmrg 
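/* For example (a hypothetical caller; many cleanup passes follow this
   pattern): after rewriting a conditional jump into a simplejump, the
   stale fallthru successor edge is removed with

       if (purge_dead_edges (bb))
	 cleanup_cfg (0);

   Only edges are removed here; purge_dead_edges never deletes or rewrites
   the insns themselves.  */
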
326510d565efSmrg /* Search all basic blocks for potentially dead edges and purge them.  Return
326610d565efSmrg    true if some edge has been eliminated.  */
326710d565efSmrg 
326810d565efSmrg bool
326910d565efSmrg purge_all_dead_edges (void)
327010d565efSmrg {
  bool purged = false;
327210d565efSmrg   basic_block bb;
327310d565efSmrg 
327410d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
327510d565efSmrg     {
327610d565efSmrg       bool purged_here = purge_dead_edges (bb);
327710d565efSmrg 
327810d565efSmrg       purged |= purged_here;
327910d565efSmrg     }
328010d565efSmrg 
328110d565efSmrg   return purged;
328210d565efSmrg }
328310d565efSmrg 
328410d565efSmrg /* This is used by a few passes that emit some instructions after abnormal
328510d565efSmrg    calls, moving the basic block's end, while they in fact do want to emit
328610d565efSmrg    them on the fallthru edge.  Look for abnormal call edges, find backward
328710d565efSmrg    the call in the block and insert the instructions on the edge instead.
328810d565efSmrg 
328910d565efSmrg    Similarly, handle instructions throwing exceptions internally.
329010d565efSmrg 
329110d565efSmrg    Return true when instructions have been found and inserted on edges.  */
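/* For instance (an illustrative sketch, not a dump from a real pass):
   reload may emit an output-reload move after a call that can throw,

     call_insn                <- can_throw_internal, was BB_END
     insn (set (reg) ...)     <- emitted afterwards, became the new BB_END

   The move must execute only when the call returns normally, so it is cut
   out of the block and reinserted on the fallthru edge instead.  */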
329210d565efSmrg 
329310d565efSmrg bool
329410d565efSmrg fixup_abnormal_edges (void)
329510d565efSmrg {
329610d565efSmrg   bool inserted = false;
329710d565efSmrg   basic_block bb;
329810d565efSmrg 
329910d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
330010d565efSmrg     {
330110d565efSmrg       edge e;
330210d565efSmrg       edge_iterator ei;
330310d565efSmrg 
      /* Look for the cases we are interested in: calls or instructions
	 causing exceptions.  */
330610d565efSmrg       FOR_EACH_EDGE (e, ei, bb->succs)
330710d565efSmrg 	if ((e->flags & EDGE_ABNORMAL_CALL)
330810d565efSmrg 	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
330910d565efSmrg 		== (EDGE_ABNORMAL | EDGE_EH)))
331010d565efSmrg 	  break;
331110d565efSmrg 
331210d565efSmrg       if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
331310d565efSmrg 	{
331410d565efSmrg 	  rtx_insn *insn;
331510d565efSmrg 
	  /* Get past the new insns generated.  Allow notes, as the insns
	     may already have been deleted.  */
331810d565efSmrg 	  insn = BB_END (bb);
331910d565efSmrg 	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
332010d565efSmrg 		 && !can_throw_internal (insn)
332110d565efSmrg 		 && insn != BB_HEAD (bb))
332210d565efSmrg 	    insn = PREV_INSN (insn);
332310d565efSmrg 
332410d565efSmrg 	  if (CALL_P (insn) || can_throw_internal (insn))
332510d565efSmrg 	    {
332610d565efSmrg 	      rtx_insn *stop, *next;
332710d565efSmrg 
332810d565efSmrg 	      e = find_fallthru_edge (bb->succs);
332910d565efSmrg 
333010d565efSmrg 	      stop = NEXT_INSN (BB_END (bb));
333110d565efSmrg 	      BB_END (bb) = insn;
333210d565efSmrg 
333310d565efSmrg 	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
333410d565efSmrg 		{
333510d565efSmrg 		  next = NEXT_INSN (insn);
333610d565efSmrg 		  if (INSN_P (insn))
333710d565efSmrg 		    {
333810d565efSmrg 		      delete_insn (insn);
333910d565efSmrg 
334010d565efSmrg 		      /* Sometimes there's still the return value USE.
334110d565efSmrg 			 If it's placed after a trapping call (i.e. that
334210d565efSmrg 			 call is the last insn anyway), we have no fallthru
334310d565efSmrg 			 edge.  Simply delete this use and don't try to insert
3344c7a68eb7Smrg 			 on the non-existent edge.
3345c7a68eb7Smrg 			 Similarly, sometimes a call that can throw is
3346c7a68eb7Smrg 			 followed in the source with __builtin_unreachable (),
3347c7a68eb7Smrg 			 meaning that there is UB if the call returns rather
3348c7a68eb7Smrg 			 than throws.  If there weren't any instructions
3349c7a68eb7Smrg 			 following such calls before, supposedly even the ones
3350c7a68eb7Smrg 			 we've deleted aren't significant and can be
3351c7a68eb7Smrg 			 removed.  */
3352c7a68eb7Smrg 		      if (e)
335310d565efSmrg 			{
335410d565efSmrg 			  /* We're not deleting it, we're moving it.  */
335510d565efSmrg 			  insn->set_undeleted ();
335610d565efSmrg 			  SET_PREV_INSN (insn) = NULL_RTX;
335710d565efSmrg 			  SET_NEXT_INSN (insn) = NULL_RTX;
335810d565efSmrg 
335910d565efSmrg 			  insert_insn_on_edge (insn, e);
336010d565efSmrg 			  inserted = true;
336110d565efSmrg 			}
336210d565efSmrg 		    }
336310d565efSmrg 		  else if (!BARRIER_P (insn))
336410d565efSmrg 		    set_block_for_insn (insn, NULL);
336510d565efSmrg 		}
336610d565efSmrg 	    }
336710d565efSmrg 
336810d565efSmrg 	  /* It may be that we don't find any trapping insn.  In this
336910d565efSmrg 	     case we discovered quite late that the insn that had been
337010d565efSmrg 	     marked as can_throw_internal in fact couldn't trap at all.
337110d565efSmrg 	     So we should in fact delete the EH edges out of the block.  */
337210d565efSmrg 	  else
337310d565efSmrg 	    purge_dead_edges (bb);
337410d565efSmrg 	}
337510d565efSmrg     }
337610d565efSmrg 
337710d565efSmrg   return inserted;
337810d565efSmrg }
337910d565efSmrg 
338010d565efSmrg /* Cut the insns from FIRST to LAST out of the insns stream.  */
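/* A typical use (this exact call appears in record_effective_endpoints
   below):

       BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);

   The span keeps its internal links; only its endpoints are detached, and
   the surrounding chain is stitched back together.  */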
338110d565efSmrg 
338210d565efSmrg rtx_insn *
338310d565efSmrg unlink_insn_chain (rtx_insn *first, rtx_insn *last)
338410d565efSmrg {
338510d565efSmrg   rtx_insn *prevfirst = PREV_INSN (first);
338610d565efSmrg   rtx_insn *nextlast = NEXT_INSN (last);
338710d565efSmrg 
338810d565efSmrg   SET_PREV_INSN (first) = NULL;
338910d565efSmrg   SET_NEXT_INSN (last) = NULL;
339010d565efSmrg   if (prevfirst)
339110d565efSmrg     SET_NEXT_INSN (prevfirst) = nextlast;
339210d565efSmrg   if (nextlast)
339310d565efSmrg     SET_PREV_INSN (nextlast) = prevfirst;
339410d565efSmrg   else
339510d565efSmrg     set_last_insn (prevfirst);
339610d565efSmrg   if (!prevfirst)
339710d565efSmrg     set_first_insn (nextlast);
339810d565efSmrg   return first;
339910d565efSmrg }
340010d565efSmrg 
340110d565efSmrg /* Skip over inter-block insns occurring after BB which are typically
340210d565efSmrg    associated with BB (e.g., barriers). If there are any such insns,
340310d565efSmrg    we return the last one. Otherwise, we return the end of BB.  */
340410d565efSmrg 
340510d565efSmrg static rtx_insn *
340610d565efSmrg skip_insns_after_block (basic_block bb)
340710d565efSmrg {
340810d565efSmrg   rtx_insn *insn, *last_insn, *next_head, *prev;
340910d565efSmrg 
341010d565efSmrg   next_head = NULL;
341110d565efSmrg   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
341210d565efSmrg     next_head = BB_HEAD (bb->next_bb);
341310d565efSmrg 
341410d565efSmrg   for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
341510d565efSmrg     {
341610d565efSmrg       if (insn == next_head)
341710d565efSmrg 	break;
341810d565efSmrg 
341910d565efSmrg       switch (GET_CODE (insn))
342010d565efSmrg 	{
342110d565efSmrg 	case BARRIER:
342210d565efSmrg 	  last_insn = insn;
342310d565efSmrg 	  continue;
342410d565efSmrg 
342510d565efSmrg 	case NOTE:
342610d565efSmrg 	  switch (NOTE_KIND (insn))
342710d565efSmrg 	    {
342810d565efSmrg 	    case NOTE_INSN_BLOCK_END:
342910d565efSmrg 	      gcc_unreachable ();
343010d565efSmrg 	      continue;
343110d565efSmrg 	    default:
343210d565efSmrg 	      continue;
343410d565efSmrg 	    }
343510d565efSmrg 	  break;
343610d565efSmrg 
343710d565efSmrg 	case CODE_LABEL:
343810d565efSmrg 	  if (NEXT_INSN (insn)
343910d565efSmrg 	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
344010d565efSmrg 	    {
344110d565efSmrg 	      insn = NEXT_INSN (insn);
344210d565efSmrg 	      last_insn = insn;
344310d565efSmrg 	      continue;
344410d565efSmrg 	    }
344510d565efSmrg 	  break;
344610d565efSmrg 
344710d565efSmrg 	default:
344810d565efSmrg 	  break;
344910d565efSmrg 	}
345010d565efSmrg 
345110d565efSmrg       break;
345210d565efSmrg     }
345310d565efSmrg 
  /* It is possible to hit a contradictory sequence.  For instance:
345510d565efSmrg 
345610d565efSmrg      jump_insn
345710d565efSmrg      NOTE_INSN_BLOCK_BEG
345810d565efSmrg      barrier
345910d565efSmrg 
     where the barrier belongs to the jump_insn, but the note does not.
     This can be created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */
346310d565efSmrg 
346410d565efSmrg   for (insn = last_insn; insn != BB_END (bb); insn = prev)
346510d565efSmrg     {
346610d565efSmrg       prev = PREV_INSN (insn);
346710d565efSmrg       if (NOTE_P (insn))
346810d565efSmrg 	switch (NOTE_KIND (insn))
346910d565efSmrg 	  {
347010d565efSmrg 	  case NOTE_INSN_BLOCK_END:
347110d565efSmrg 	    gcc_unreachable ();
347210d565efSmrg 	    break;
347310d565efSmrg 	  case NOTE_INSN_DELETED:
347410d565efSmrg 	  case NOTE_INSN_DELETED_LABEL:
347510d565efSmrg 	  case NOTE_INSN_DELETED_DEBUG_LABEL:
347610d565efSmrg 	    continue;
347710d565efSmrg 	  default:
347810d565efSmrg 	    reorder_insns (insn, insn, last_insn);
347910d565efSmrg 	  }
348010d565efSmrg     }
348110d565efSmrg 
348210d565efSmrg   return last_insn;
348310d565efSmrg }
348410d565efSmrg 
348510d565efSmrg /* Locate or create a label for a given basic block.  */
348610d565efSmrg 
348710d565efSmrg static rtx_insn *
348810d565efSmrg label_for_bb (basic_block bb)
348910d565efSmrg {
349010d565efSmrg   rtx_insn *label = BB_HEAD (bb);
349110d565efSmrg 
349210d565efSmrg   if (!LABEL_P (label))
349310d565efSmrg     {
349410d565efSmrg       if (dump_file)
349510d565efSmrg 	fprintf (dump_file, "Emitting label for block %d\n", bb->index);
349610d565efSmrg 
349710d565efSmrg       label = block_label (bb);
349810d565efSmrg     }
349910d565efSmrg 
350010d565efSmrg   return label;
350110d565efSmrg }
350210d565efSmrg 
350310d565efSmrg /* Locate the effective beginning and end of the insn chain for each
350410d565efSmrg    block, as defined by skip_insns_after_block above.  */
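/* For example (a hypothetical insn stream): leading notes before the first
   NOTE_INSN_BASIC_BLOCK become cfg_layout_function_header, a BARRIER after
   a block's last insn is detached into that block's BB_FOOTER, and
   whatever trails the last block becomes cfg_layout_function_footer.  */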
350510d565efSmrg 
350610d565efSmrg static void
350710d565efSmrg record_effective_endpoints (void)
350810d565efSmrg {
350910d565efSmrg   rtx_insn *next_insn;
351010d565efSmrg   basic_block bb;
351110d565efSmrg   rtx_insn *insn;
351210d565efSmrg 
351310d565efSmrg   for (insn = get_insns ();
351410d565efSmrg        insn
351510d565efSmrg        && NOTE_P (insn)
351610d565efSmrg        && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
351710d565efSmrg        insn = NEXT_INSN (insn))
351810d565efSmrg     continue;
351910d565efSmrg   /* No basic blocks at all?  */
352010d565efSmrg   gcc_assert (insn);
352110d565efSmrg 
352210d565efSmrg   if (PREV_INSN (insn))
352310d565efSmrg     cfg_layout_function_header =
352410d565efSmrg 	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
352510d565efSmrg   else
352610d565efSmrg     cfg_layout_function_header = NULL;
352710d565efSmrg 
352810d565efSmrg   next_insn = get_insns ();
352910d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
353010d565efSmrg     {
353110d565efSmrg       rtx_insn *end;
353210d565efSmrg 
353310d565efSmrg       if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
353410d565efSmrg 	BB_HEADER (bb) = unlink_insn_chain (next_insn,
353510d565efSmrg 						PREV_INSN (BB_HEAD (bb)));
353610d565efSmrg       end = skip_insns_after_block (bb);
353710d565efSmrg       if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
353810d565efSmrg 	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
353910d565efSmrg       next_insn = NEXT_INSN (BB_END (bb));
354010d565efSmrg     }
354110d565efSmrg 
354210d565efSmrg   cfg_layout_function_footer = next_insn;
354310d565efSmrg   if (cfg_layout_function_footer)
354410d565efSmrg     cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
354510d565efSmrg }
354610d565efSmrg 
354710d565efSmrg namespace {
354810d565efSmrg 
354910d565efSmrg const pass_data pass_data_into_cfg_layout_mode =
355010d565efSmrg {
355110d565efSmrg   RTL_PASS, /* type */
355210d565efSmrg   "into_cfglayout", /* name */
355310d565efSmrg   OPTGROUP_NONE, /* optinfo_flags */
355410d565efSmrg   TV_CFG, /* tv_id */
355510d565efSmrg   0, /* properties_required */
355610d565efSmrg   PROP_cfglayout, /* properties_provided */
355710d565efSmrg   0, /* properties_destroyed */
355810d565efSmrg   0, /* todo_flags_start */
355910d565efSmrg   0, /* todo_flags_finish */
356010d565efSmrg };
356110d565efSmrg 
356210d565efSmrg class pass_into_cfg_layout_mode : public rtl_opt_pass
356310d565efSmrg {
356410d565efSmrg public:
356510d565efSmrg   pass_into_cfg_layout_mode (gcc::context *ctxt)
356610d565efSmrg     : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
356710d565efSmrg   {}
356810d565efSmrg 
356910d565efSmrg   /* opt_pass methods: */
357010d565efSmrg   virtual unsigned int execute (function *)
357110d565efSmrg     {
357210d565efSmrg       cfg_layout_initialize (0);
357310d565efSmrg       return 0;
357410d565efSmrg     }
357510d565efSmrg 
357610d565efSmrg }; // class pass_into_cfg_layout_mode
357710d565efSmrg 
357810d565efSmrg } // anon namespace
357910d565efSmrg 
358010d565efSmrg rtl_opt_pass *
358110d565efSmrg make_pass_into_cfg_layout_mode (gcc::context *ctxt)
358210d565efSmrg {
358310d565efSmrg   return new pass_into_cfg_layout_mode (ctxt);
358410d565efSmrg }
358510d565efSmrg 
358610d565efSmrg namespace {
358710d565efSmrg 
358810d565efSmrg const pass_data pass_data_outof_cfg_layout_mode =
358910d565efSmrg {
359010d565efSmrg   RTL_PASS, /* type */
359110d565efSmrg   "outof_cfglayout", /* name */
359210d565efSmrg   OPTGROUP_NONE, /* optinfo_flags */
359310d565efSmrg   TV_CFG, /* tv_id */
359410d565efSmrg   0, /* properties_required */
359510d565efSmrg   0, /* properties_provided */
359610d565efSmrg   PROP_cfglayout, /* properties_destroyed */
359710d565efSmrg   0, /* todo_flags_start */
359810d565efSmrg   0, /* todo_flags_finish */
359910d565efSmrg };
360010d565efSmrg 
360110d565efSmrg class pass_outof_cfg_layout_mode : public rtl_opt_pass
360210d565efSmrg {
360310d565efSmrg public:
360410d565efSmrg   pass_outof_cfg_layout_mode (gcc::context *ctxt)
360510d565efSmrg     : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
360610d565efSmrg   {}
360710d565efSmrg 
360810d565efSmrg   /* opt_pass methods: */
360910d565efSmrg   virtual unsigned int execute (function *);
361010d565efSmrg 
361110d565efSmrg }; // class pass_outof_cfg_layout_mode
361210d565efSmrg 
361310d565efSmrg unsigned int
361410d565efSmrg pass_outof_cfg_layout_mode::execute (function *fun)
361510d565efSmrg {
361610d565efSmrg   basic_block bb;
361710d565efSmrg 
361810d565efSmrg   FOR_EACH_BB_FN (bb, fun)
361910d565efSmrg     if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
362010d565efSmrg       bb->aux = bb->next_bb;
362110d565efSmrg 
362210d565efSmrg   cfg_layout_finalize ();
362310d565efSmrg 
362410d565efSmrg   return 0;
362510d565efSmrg }
362610d565efSmrg 
362710d565efSmrg } // anon namespace
362810d565efSmrg 
362910d565efSmrg rtl_opt_pass *
363010d565efSmrg make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
363110d565efSmrg {
363210d565efSmrg   return new pass_outof_cfg_layout_mode (ctxt);
363310d565efSmrg }
363410d565efSmrg 
363510d565efSmrg 
363610d565efSmrg /* Link the basic blocks in the correct order, compacting the basic
363710d565efSmrg    block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
363810d565efSmrg    function also clears the basic block header and footer fields.
363910d565efSmrg 
364010d565efSmrg    This function is usually called after a pass (e.g. tracer) finishes
364110d565efSmrg    some transformations while in cfglayout mode.  The required sequence
364210d565efSmrg    of the basic blocks is in a linked list along the bb->aux field.
   This function re-links the basic block prev_bb and next_bb pointers
364410d565efSmrg    accordingly, and it compacts and renumbers the blocks.
364510d565efSmrg 
364610d565efSmrg    FIXME: This currently works only for RTL, but the only RTL-specific
364710d565efSmrg    bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
364810d565efSmrg    to GIMPLE a long time ago, but it doesn't relink the basic block
364910d565efSmrg    chain.  It could do that (to give better initial RTL) if this function
365010d565efSmrg    is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */
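/* A minimal caller (a sketch; compare pass_outof_cfg_layout_mode::execute
   above, which threads the same kind of aux chain before calling
   cfg_layout_finalize):

       FOR_EACH_BB_FN (bb, cfun)
	 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	   bb->aux = bb->next_bb;
       relink_block_chain (false);

   i.e. the desired order is threaded through bb->aux and then committed.  */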
365110d565efSmrg 
365210d565efSmrg void
365310d565efSmrg relink_block_chain (bool stay_in_cfglayout_mode)
365410d565efSmrg {
365510d565efSmrg   basic_block bb, prev_bb;
365610d565efSmrg   int index;
365710d565efSmrg 
365810d565efSmrg   /* Maybe dump the re-ordered sequence.  */
365910d565efSmrg   if (dump_file)
366010d565efSmrg     {
366110d565efSmrg       fprintf (dump_file, "Reordered sequence:\n");
366210d565efSmrg       for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
366310d565efSmrg 	   NUM_FIXED_BLOCKS;
366410d565efSmrg 	   bb;
366510d565efSmrg 	   bb = (basic_block) bb->aux, index++)
366610d565efSmrg 	{
366710d565efSmrg 	  fprintf (dump_file, " %i ", index);
366810d565efSmrg 	  if (get_bb_original (bb))
366910d565efSmrg 	    fprintf (dump_file, "duplicate of %i ",
367010d565efSmrg 		     get_bb_original (bb)->index);
367110d565efSmrg 	  else if (forwarder_block_p (bb)
367210d565efSmrg 		   && !LABEL_P (BB_HEAD (bb)))
367310d565efSmrg 	    fprintf (dump_file, "compensation ");
367410d565efSmrg 	  else
367510d565efSmrg 	    fprintf (dump_file, "bb %i ", bb->index);
367610d565efSmrg 	}
367710d565efSmrg     }
367810d565efSmrg 
367910d565efSmrg   /* Now reorder the blocks.  */
368010d565efSmrg   prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
368110d565efSmrg   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
368210d565efSmrg   for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
368310d565efSmrg     {
368410d565efSmrg       bb->prev_bb = prev_bb;
368510d565efSmrg       prev_bb->next_bb = bb;
368610d565efSmrg     }
368710d565efSmrg   prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
368810d565efSmrg   EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
368910d565efSmrg 
369010d565efSmrg   /* Then, clean up the aux fields.  */
369110d565efSmrg   FOR_ALL_BB_FN (bb, cfun)
369210d565efSmrg     {
369310d565efSmrg       bb->aux = NULL;
369410d565efSmrg       if (!stay_in_cfglayout_mode)
369510d565efSmrg 	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
369610d565efSmrg     }
369710d565efSmrg 
  /* Maybe reset the original copy tables; they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
370110d565efSmrg   if (original_copy_tables_initialized_p ())
370210d565efSmrg     free_original_copy_tables ();
370310d565efSmrg   if (stay_in_cfglayout_mode)
370410d565efSmrg     initialize_original_copy_tables ();
370510d565efSmrg 
370610d565efSmrg   /* Finally, put basic_block_info in the new order.  */
370710d565efSmrg   compact_blocks ();
370810d565efSmrg }
370910d565efSmrg 
371010d565efSmrg 
371110d565efSmrg /* Given a reorder chain, rearrange the code to match.  */
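/* The desired order is carried in the bb->aux chain built by the caller
   (see relink_block_chain above): the insn stream is first rethreaded to
   match that order, and jumps and labels are then patched up to preserve
   the semantics of the new block sequence.  */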
371210d565efSmrg 
371310d565efSmrg static void
371410d565efSmrg fixup_reorder_chain (void)
371510d565efSmrg {
371610d565efSmrg   basic_block bb;
371710d565efSmrg   rtx_insn *insn = NULL;
371810d565efSmrg 
371910d565efSmrg   if (cfg_layout_function_header)
372010d565efSmrg     {
372110d565efSmrg       set_first_insn (cfg_layout_function_header);
372210d565efSmrg       insn = cfg_layout_function_header;
372310d565efSmrg       while (NEXT_INSN (insn))
372410d565efSmrg 	insn = NEXT_INSN (insn);
372510d565efSmrg     }
372610d565efSmrg 
372710d565efSmrg   /* First do the bulk reordering -- rechain the blocks without regard to
372810d565efSmrg      the needed changes to jumps and labels.  */
372910d565efSmrg 
373010d565efSmrg   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
373110d565efSmrg        bb->aux)
373210d565efSmrg     {
373310d565efSmrg       if (BB_HEADER (bb))
373410d565efSmrg 	{
373510d565efSmrg 	  if (insn)
373610d565efSmrg 	    SET_NEXT_INSN (insn) = BB_HEADER (bb);
373710d565efSmrg 	  else
373810d565efSmrg 	    set_first_insn (BB_HEADER (bb));
373910d565efSmrg 	  SET_PREV_INSN (BB_HEADER (bb)) = insn;
374010d565efSmrg 	  insn = BB_HEADER (bb);
374110d565efSmrg 	  while (NEXT_INSN (insn))
374210d565efSmrg 	    insn = NEXT_INSN (insn);
374310d565efSmrg 	}
374410d565efSmrg       if (insn)
374510d565efSmrg 	SET_NEXT_INSN (insn) = BB_HEAD (bb);
374610d565efSmrg       else
374710d565efSmrg 	set_first_insn (BB_HEAD (bb));
374810d565efSmrg       SET_PREV_INSN (BB_HEAD (bb)) = insn;
374910d565efSmrg       insn = BB_END (bb);
375010d565efSmrg       if (BB_FOOTER (bb))
375110d565efSmrg 	{
375210d565efSmrg 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
375310d565efSmrg 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
375410d565efSmrg 	  while (NEXT_INSN (insn))
375510d565efSmrg 	    insn = NEXT_INSN (insn);
375610d565efSmrg 	}
375710d565efSmrg     }
375810d565efSmrg 
375910d565efSmrg   SET_NEXT_INSN (insn) = cfg_layout_function_footer;
376010d565efSmrg   if (cfg_layout_function_footer)
376110d565efSmrg     SET_PREV_INSN (cfg_layout_function_footer) = insn;
376210d565efSmrg 
376310d565efSmrg   while (NEXT_INSN (insn))
376410d565efSmrg     insn = NEXT_INSN (insn);
376510d565efSmrg 
376610d565efSmrg   set_last_insn (insn);
376710d565efSmrg   if (flag_checking)
376810d565efSmrg     verify_insn_chain ();
376910d565efSmrg 
  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */
377210d565efSmrg 
377310d565efSmrg   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
377410d565efSmrg        bb->aux)
377510d565efSmrg     {
377610d565efSmrg       edge e_fall, e_taken, e;
377710d565efSmrg       rtx_insn *bb_end_insn;
377810d565efSmrg       rtx ret_label = NULL_RTX;
377910d565efSmrg       basic_block nb;
378010d565efSmrg       edge_iterator ei;
378110d565efSmrg 
378210d565efSmrg       if (EDGE_COUNT (bb->succs) == 0)
378310d565efSmrg 	continue;
378410d565efSmrg 
378510d565efSmrg       /* Find the old fallthru edge, and another non-EH edge for
378610d565efSmrg 	 a taken jump.  */
378710d565efSmrg       e_taken = e_fall = NULL;
378810d565efSmrg 
378910d565efSmrg       FOR_EACH_EDGE (e, ei, bb->succs)
379010d565efSmrg 	if (e->flags & EDGE_FALLTHRU)
379110d565efSmrg 	  e_fall = e;
379210d565efSmrg 	else if (! (e->flags & EDGE_EH))
379310d565efSmrg 	  e_taken = e;
379410d565efSmrg 
379510d565efSmrg       bb_end_insn = BB_END (bb);
379610d565efSmrg       if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
379710d565efSmrg 	{
379810d565efSmrg 	  ret_label = JUMP_LABEL (bb_end_jump);
379910d565efSmrg 	  if (any_condjump_p (bb_end_jump))
380010d565efSmrg 	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
380510d565efSmrg 	      if (!e_fall)
380610d565efSmrg 		{
380710d565efSmrg 		  gcc_assert (!onlyjump_p (bb_end_jump)
380810d565efSmrg 			      || returnjump_p (bb_end_jump)
380910d565efSmrg                               || (e_taken->flags & EDGE_CROSSING));
381010d565efSmrg 		  emit_barrier_after (bb_end_jump);
381110d565efSmrg 		  continue;
381210d565efSmrg 		}
381310d565efSmrg 
381410d565efSmrg 	      /* If the old fallthru is still next, nothing to do.  */
381510d565efSmrg 	      if (bb->aux == e_fall->dest
381610d565efSmrg 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
381710d565efSmrg 		continue;
381810d565efSmrg 
	      /* The degenerate case of a conditional jump jumping to the
		 next instruction can happen for jumps with side effects.
		 We need to construct a forwarder block, and this will be
		 done just fine by force_nonfallthru below.  */
382310d565efSmrg 	      if (!e_taken)
382410d565efSmrg 		;
382510d565efSmrg 
382610d565efSmrg 	      /* There is another special case: if *neither* block is next,
382710d565efSmrg 		 such as happens at the very end of a function, then we'll
382810d565efSmrg 		 need to add a new unconditional jump.  Choose the taken
382910d565efSmrg 		 edge based on known or assumed probability.  */
383010d565efSmrg 	      else if (bb->aux != e_taken->dest)
383110d565efSmrg 		{
383210d565efSmrg 		  rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
383310d565efSmrg 
383410d565efSmrg 		  if (note
3835c7a68eb7Smrg 		      && profile_probability::from_reg_br_prob_note
3836c7a68eb7Smrg 				 (XINT (note, 0)) < profile_probability::even ()
383710d565efSmrg 		      && invert_jump (bb_end_jump,
383810d565efSmrg 				      (e_fall->dest
383910d565efSmrg 				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
384010d565efSmrg 				       ? NULL_RTX
384110d565efSmrg 				       : label_for_bb (e_fall->dest)), 0))
384210d565efSmrg 		    {
384310d565efSmrg 		      e_fall->flags &= ~EDGE_FALLTHRU;
384410d565efSmrg 		      gcc_checking_assert (could_fall_through
384510d565efSmrg 					   (e_taken->src, e_taken->dest));
384610d565efSmrg 		      e_taken->flags |= EDGE_FALLTHRU;
384710d565efSmrg 		      update_br_prob_note (bb);
384810d565efSmrg 		      e = e_fall, e_fall = e_taken, e_taken = e;
384910d565efSmrg 		    }
385010d565efSmrg 		}
385110d565efSmrg 
385210d565efSmrg 	      /* If the "jumping" edge is a crossing edge, and the fall
385310d565efSmrg 		 through edge is non-crossing, leave things as they are.  */
385410d565efSmrg 	      else if ((e_taken->flags & EDGE_CROSSING)
385510d565efSmrg 		       && !(e_fall->flags & EDGE_CROSSING))
385610d565efSmrg 		continue;
385710d565efSmrg 
385810d565efSmrg 	      /* Otherwise we can try to invert the jump.  This will
385910d565efSmrg 		 basically never fail; however, keep up the pretense.  */
386010d565efSmrg 	      else if (invert_jump (bb_end_jump,
386110d565efSmrg 				    (e_fall->dest
386210d565efSmrg 				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
386310d565efSmrg 				     ? NULL_RTX
386410d565efSmrg 				     : label_for_bb (e_fall->dest)), 0))
386510d565efSmrg 		{
386610d565efSmrg 		  e_fall->flags &= ~EDGE_FALLTHRU;
386710d565efSmrg 		  gcc_checking_assert (could_fall_through
386810d565efSmrg 				       (e_taken->src, e_taken->dest));
386910d565efSmrg 		  e_taken->flags |= EDGE_FALLTHRU;
387010d565efSmrg 		  update_br_prob_note (bb);
387110d565efSmrg 		  if (LABEL_NUSES (ret_label) == 0
387210d565efSmrg 		      && single_pred_p (e_taken->dest))
387310d565efSmrg 		    delete_insn (as_a<rtx_insn *> (ret_label));
387410d565efSmrg 		  continue;
387510d565efSmrg 		}
387610d565efSmrg 	    }
387710d565efSmrg 	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
387810d565efSmrg 	    {
387910d565efSmrg 	      /* If the old fallthru is still next or if
388010d565efSmrg 		 asm goto doesn't have a fallthru (e.g. when followed by
388110d565efSmrg 		 __builtin_unreachable ()), nothing to do.  */
388210d565efSmrg 	      if (! e_fall
388310d565efSmrg 		  || bb->aux == e_fall->dest
388410d565efSmrg 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
388510d565efSmrg 		continue;
388610d565efSmrg 
388710d565efSmrg 	      /* Otherwise we'll have to use the fallthru fixup below.  */
388810d565efSmrg 	    }
388910d565efSmrg 	  else
389010d565efSmrg 	    {
389110d565efSmrg 	      /* Otherwise we have some return, switch or computed
389210d565efSmrg 		 jump.  In the 99% case, there should not have been a
389310d565efSmrg 		 fallthru edge.  */
389410d565efSmrg 	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
389510d565efSmrg 	      continue;
389610d565efSmrg 	    }
389710d565efSmrg 	}
389810d565efSmrg       else
389910d565efSmrg 	{
390010d565efSmrg 	  /* No fallthru implies a noreturn function with EH edges, or
390110d565efSmrg 	     something similarly bizarre.  In any case, we don't need to
390210d565efSmrg 	     do anything.  */
390310d565efSmrg 	  if (! e_fall)
390410d565efSmrg 	    continue;
390510d565efSmrg 
390610d565efSmrg 	  /* If the fallthru block is still next, nothing to do.  */
390710d565efSmrg 	  if (bb->aux == e_fall->dest)
390810d565efSmrg 	    continue;
390910d565efSmrg 
391010d565efSmrg 	  /* A fallthru to exit block.  */
391110d565efSmrg 	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
391210d565efSmrg 	    continue;
391310d565efSmrg 	}
391410d565efSmrg 
391510d565efSmrg       /* We got here if we need to add a new jump insn.
391610d565efSmrg 	 Note force_nonfallthru can delete E_FALL and thus we have to
391710d565efSmrg 	 save E_FALL->src prior to the call to force_nonfallthru.  */
391810d565efSmrg       nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
391910d565efSmrg       if (nb)
392010d565efSmrg 	{
392110d565efSmrg 	  nb->aux = bb->aux;
392210d565efSmrg 	  bb->aux = nb;
392310d565efSmrg 	  /* Don't process this new block.  */
392410d565efSmrg 	  bb = nb;
392510d565efSmrg 	}
392610d565efSmrg     }
392710d565efSmrg 
392810d565efSmrg   relink_block_chain (/*stay_in_cfglayout_mode=*/false);
392910d565efSmrg 
393010d565efSmrg   /* Annoying special case - jump around dead jumptables left in the code.  */
393110d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
393210d565efSmrg     {
393310d565efSmrg       edge e = find_fallthru_edge (bb->succs);
393410d565efSmrg 
393510d565efSmrg       if (e && !can_fallthru (e->src, e->dest))
393610d565efSmrg 	force_nonfallthru (e);
393710d565efSmrg     }
393810d565efSmrg 
3939*0fc04c29Smrg   /* When not optimizing, ensure that each edge's goto_locus is
3940*0fc04c29Smrg      represented by some insn with that locus in the RTL.  */
3941*0fc04c29Smrg   if (!optimize && !DECL_IGNORED_P (current_function_decl))
394210d565efSmrg     FOR_EACH_BB_FN (bb, cfun)
394310d565efSmrg       {
394410d565efSmrg         edge e;
394510d565efSmrg         edge_iterator ei;
394610d565efSmrg 
394710d565efSmrg         FOR_EACH_EDGE (e, ei, bb->succs)
394810d565efSmrg 	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
394910d565efSmrg 	      && !(e->flags & EDGE_ABNORMAL))
395010d565efSmrg 	    {
395110d565efSmrg 	      edge e2;
395210d565efSmrg 	      edge_iterator ei2;
395310d565efSmrg 	      basic_block dest, nb;
395410d565efSmrg 	      rtx_insn *end;
395510d565efSmrg 
395610d565efSmrg 	      insn = BB_END (e->src);
395710d565efSmrg 	      end = PREV_INSN (BB_HEAD (e->src));
395810d565efSmrg 	      while (insn != end
395910d565efSmrg 		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
396010d565efSmrg 		insn = PREV_INSN (insn);
396110d565efSmrg 	      if (insn != end
396210d565efSmrg 		  && INSN_LOCATION (insn) == e->goto_locus)
396310d565efSmrg 		continue;
396410d565efSmrg 	      if (simplejump_p (BB_END (e->src))
396510d565efSmrg 		  && !INSN_HAS_LOCATION (BB_END (e->src)))
396610d565efSmrg 		{
396710d565efSmrg 		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
396810d565efSmrg 		  continue;
396910d565efSmrg 		}
397010d565efSmrg 	      dest = e->dest;
397110d565efSmrg 	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
397210d565efSmrg 		{
397310d565efSmrg 		  /* Non-fallthru edges to the exit block cannot be split.  */
397410d565efSmrg 		  if (!(e->flags & EDGE_FALLTHRU))
397510d565efSmrg 		    continue;
397610d565efSmrg 		}
397710d565efSmrg 	      else
397810d565efSmrg 		{
397910d565efSmrg 		  insn = BB_HEAD (dest);
398010d565efSmrg 		  end = NEXT_INSN (BB_END (dest));
398110d565efSmrg 		  while (insn != end && !NONDEBUG_INSN_P (insn))
398210d565efSmrg 		    insn = NEXT_INSN (insn);
398310d565efSmrg 		  if (insn != end && INSN_HAS_LOCATION (insn)
398410d565efSmrg 		      && INSN_LOCATION (insn) == e->goto_locus)
398510d565efSmrg 		    continue;
398610d565efSmrg 		}
398710d565efSmrg 	      nb = split_edge (e);
398810d565efSmrg 	      if (!INSN_P (BB_END (nb)))
398910d565efSmrg 		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
399010d565efSmrg 							 nb);
399110d565efSmrg 	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;
399210d565efSmrg 
399310d565efSmrg 	      /* If there are other incoming edges to the destination block
399410d565efSmrg 		 with the same goto locus, redirect them to the new block as
399510d565efSmrg 		 well; this can prevent other such blocks from being created
399610d565efSmrg 		 in subsequent iterations of the loop.  */
399710d565efSmrg 	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
399810d565efSmrg 		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
399910d565efSmrg 		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
400010d565efSmrg 		    && e->goto_locus == e2->goto_locus)
400110d565efSmrg 		  redirect_edge_and_branch (e2, nb);
400210d565efSmrg 		else
400310d565efSmrg 		  ei_next (&ei2);
400410d565efSmrg 	    }
400510d565efSmrg       }
400610d565efSmrg }
400710d565efSmrg 
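/* Illustrative sketch, not part of the pass: in cfglayout mode the
   intended block order is threaded through the bb->aux pointers (as in
   the loop above that steps "bb = (basic_block) bb->aux"), not through
   bb->next_bb.  A generic walk over the planned layout therefore looks
   roughly like this, where process_block is a hypothetical callback:

     basic_block bb;
     for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
          bb != NULL;
          bb = (basic_block) bb->aux)
       process_block (bb);

   Only relink_block_chain, called above, turns this aux order back
   into the real next_bb and insn-chain order.  */
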
400810d565efSmrg /* Perform sanity checks on the insn chain.
400910d565efSmrg    1. Check that next/prev pointers are consistent in both the forward and
401010d565efSmrg       reverse direction.
401110d565efSmrg    2. Count insns in chain, going both directions, and check if equal.
401210d565efSmrg    3. Check that get_last_insn () returns the actual end of chain.  */
401310d565efSmrg 
401410d565efSmrg DEBUG_FUNCTION void
401510d565efSmrg verify_insn_chain (void)
401610d565efSmrg {
401710d565efSmrg   rtx_insn *x, *prevx, *nextx;
401810d565efSmrg   int insn_cnt1, insn_cnt2;
401910d565efSmrg 
402010d565efSmrg   for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
402110d565efSmrg        x != 0;
402210d565efSmrg        prevx = x, insn_cnt1++, x = NEXT_INSN (x))
402310d565efSmrg     gcc_assert (PREV_INSN (x) == prevx);
402410d565efSmrg 
402510d565efSmrg   gcc_assert (prevx == get_last_insn ());
402610d565efSmrg 
402710d565efSmrg   for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
402810d565efSmrg        x != 0;
402910d565efSmrg        nextx = x, insn_cnt2++, x = PREV_INSN (x))
403010d565efSmrg     gcc_assert (NEXT_INSN (x) == nextx);
403110d565efSmrg 
403210d565efSmrg   gcc_assert (insn_cnt1 == insn_cnt2);
403310d565efSmrg }
403410d565efSmrg 
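/* For illustration only: the insn stream is a doubly-linked list, so
   the checks above amount to the following sketch, where X ranges over
   all insns:

     if (NEXT_INSN (X) != NULL)
       gcc_assert (PREV_INSN (NEXT_INSN (X)) == X);

   together with get_last_insn () being exactly the insn whose
   NEXT_INSN is NULL.  Code that splices insns by hand with
   SET_NEXT_INSN/SET_PREV_INSN, as much of this file does, is expected
   to leave these invariants intact.  */
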
403510d565efSmrg /* If we have assembler epilogues, the block falling through to exit must
403610d565efSmrg    be the last one in the reordered chain when we reach final.  Ensure
403710d565efSmrg    that this condition is met.  */
403810d565efSmrg static void
403910d565efSmrg fixup_fallthru_exit_predecessor (void)
404010d565efSmrg {
404110d565efSmrg   edge e;
404210d565efSmrg   basic_block bb = NULL;
404310d565efSmrg 
404410d565efSmrg   /* This transformation is not valid before reload, because we might
404510d565efSmrg      separate a call from the instruction that copies the return
404610d565efSmrg      value.  */
404710d565efSmrg   gcc_assert (reload_completed);
404810d565efSmrg 
404910d565efSmrg   e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
405010d565efSmrg   if (e)
405110d565efSmrg     bb = e->src;
405210d565efSmrg 
405310d565efSmrg   if (bb && bb->aux)
405410d565efSmrg     {
405510d565efSmrg       basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
405610d565efSmrg 
405710d565efSmrg       /* If the very first block is the one with the fall-through exit
405810d565efSmrg 	 edge, we have to split that block.  */
405910d565efSmrg       if (c == bb)
406010d565efSmrg 	{
406110d565efSmrg 	  bb = split_block_after_labels (bb)->dest;
406210d565efSmrg 	  bb->aux = c->aux;
406310d565efSmrg 	  c->aux = bb;
406410d565efSmrg 	  BB_FOOTER (bb) = BB_FOOTER (c);
406510d565efSmrg 	  BB_FOOTER (c) = NULL;
406610d565efSmrg 	}
406710d565efSmrg 
406810d565efSmrg       while (c->aux != bb)
406910d565efSmrg 	c = (basic_block) c->aux;
407010d565efSmrg 
407110d565efSmrg       c->aux = bb->aux;
407210d565efSmrg       while (c->aux)
407310d565efSmrg 	c = (basic_block) c->aux;
407410d565efSmrg 
407510d565efSmrg       c->aux = bb;
407610d565efSmrg       bb->aux = NULL;
407710d565efSmrg     }
407810d565efSmrg }
407910d565efSmrg 
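/* A sketch of the list surgery performed above, for illustration: BB
   is unlinked from the aux chain and re-appended at its tail, so that
   the block falling through to the exit comes last:

     before:  ... -> C -> BB -> ... -> LAST -> NULL
     after:   ... -> C -> ... -> LAST -> BB -> NULL

   where C is BB's predecessor in the aux chain and LAST is the block
   whose aux pointer was NULL.  */
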
408010d565efSmrg /* In case the exit block has more than one fallthru predecessor, force
408110d565efSmrg    there to be only one.  */
408210d565efSmrg 
408310d565efSmrg static void
408410d565efSmrg force_one_exit_fallthru (void)
408510d565efSmrg {
408610d565efSmrg   edge e, predecessor = NULL;
408710d565efSmrg   bool more = false;
408810d565efSmrg   edge_iterator ei;
408910d565efSmrg   basic_block forwarder, bb;
409010d565efSmrg 
409110d565efSmrg   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
409210d565efSmrg     if (e->flags & EDGE_FALLTHRU)
409310d565efSmrg       {
409410d565efSmrg 	if (predecessor == NULL)
409510d565efSmrg 	  predecessor = e;
409610d565efSmrg 	else
409710d565efSmrg 	  {
409810d565efSmrg 	    more = true;
409910d565efSmrg 	    break;
410010d565efSmrg 	  }
410110d565efSmrg       }
410210d565efSmrg 
410310d565efSmrg   if (!more)
410410d565efSmrg     return;
410510d565efSmrg 
410610d565efSmrg   /* Exit has several fallthru predecessors.  Create a forwarder block for
410710d565efSmrg      them.  */
410810d565efSmrg   forwarder = split_edge (predecessor);
410910d565efSmrg   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
411010d565efSmrg        (e = ei_safe_edge (ei)); )
411110d565efSmrg     {
411210d565efSmrg       if (e->src == forwarder
411310d565efSmrg 	  || !(e->flags & EDGE_FALLTHRU))
411410d565efSmrg 	ei_next (&ei);
411510d565efSmrg       else
411610d565efSmrg 	redirect_edge_and_branch_force (e, forwarder);
411710d565efSmrg     }
411810d565efSmrg 
411910d565efSmrg   /* Fix up the chain of blocks -- make FORWARDER immediately precede the
412010d565efSmrg      exit block.  */
412110d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
412210d565efSmrg     {
412310d565efSmrg       if (bb->aux == NULL && bb != forwarder)
412410d565efSmrg 	{
412510d565efSmrg 	  bb->aux = forwarder;
412610d565efSmrg 	  break;
412710d565efSmrg 	}
412810d565efSmrg     }
412910d565efSmrg }
413010d565efSmrg 
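/* Illustration, not normative: if blocks B1 and B2 both fall through
   to the exit block, the function above rewires the CFG as

     before:  B1 --fallthru--> EXIT <--fallthru-- B2
     after:   B1 --fallthru--> FORWARDER --fallthru--> EXIT
              B2 --------------^

   with FORWARDER created by split_edge and every other former
   fallthru predecessor redirected into it by
   redirect_edge_and_branch_force.  */
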
413110d565efSmrg /* Return true in case it is possible to duplicate the basic block BB.  */
413210d565efSmrg 
413310d565efSmrg static bool
413410d565efSmrg cfg_layout_can_duplicate_bb_p (const_basic_block bb)
413510d565efSmrg {
413610d565efSmrg   /* Do not attempt to duplicate tablejumps, as we need to unshare
413710d565efSmrg      the dispatch table.  This is difficult to do, as the instructions
413810d565efSmrg      computing the jump destination may be hoisted outside the basic block.  */
413910d565efSmrg   if (tablejump_p (BB_END (bb), NULL, NULL))
414010d565efSmrg     return false;
414110d565efSmrg 
414210d565efSmrg   /* Do not duplicate blocks containing insns that can't be copied.  */
414310d565efSmrg   if (targetm.cannot_copy_insn_p)
414410d565efSmrg     {
414510d565efSmrg       rtx_insn *insn = BB_HEAD (bb);
414610d565efSmrg       while (1)
414710d565efSmrg 	{
414810d565efSmrg 	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
414910d565efSmrg 	    return false;
415010d565efSmrg 	  if (insn == BB_END (bb))
415110d565efSmrg 	    break;
415210d565efSmrg 	  insn = NEXT_INSN (insn);
415310d565efSmrg 	}
415410d565efSmrg     }
415510d565efSmrg 
415610d565efSmrg   return true;
415710d565efSmrg }
415810d565efSmrg 
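/* A usage sketch under stated assumptions: callers normally reach the
   predicate above through the generic wrappers declared in cfghooks.h
   rather than directly, roughly as follows (the NULL edge argument and
   the placement after BB are illustrative only):

     if (can_duplicate_block_p (bb))
       {
         basic_block new_bb = duplicate_block (bb, NULL, bb);
         ...
       }

   The hook tables at the end of this file are what route such calls
   to cfg_layout_can_duplicate_bb_p and cfg_layout_duplicate_bb.  */

/* Make a copy of the insn chain FROM .. TO, inclusive, emitting the
   copies at the end of the current insn stream, and return the first
   copied insn.  */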
415910d565efSmrg rtx_insn *
416010d565efSmrg duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
416110d565efSmrg {
416210d565efSmrg   rtx_insn *insn, *next, *copy;
416310d565efSmrg   rtx_note *last;
416410d565efSmrg 
416510d565efSmrg   /* Avoid updating the boundaries of the previous basic block.  The
416610d565efSmrg      note will get removed from the insn stream in fixup.  */
416710d565efSmrg   last = emit_note (NOTE_INSN_DELETED);
416810d565efSmrg 
416910d565efSmrg   /* Create the copy at the end of the insn chain.  The chain will
417010d565efSmrg      be reordered later.  */
417110d565efSmrg   for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
417210d565efSmrg     {
417310d565efSmrg       switch (GET_CODE (insn))
417410d565efSmrg 	{
417510d565efSmrg 	case DEBUG_INSN:
417610d565efSmrg 	  /* Don't duplicate label debug insns.  */
4177c7a68eb7Smrg 	  if (DEBUG_BIND_INSN_P (insn)
4178c7a68eb7Smrg 	      && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
417910d565efSmrg 	    break;
418010d565efSmrg 	  /* FALLTHRU */
418110d565efSmrg 	case INSN:
418210d565efSmrg 	case CALL_INSN:
418310d565efSmrg 	case JUMP_INSN:
418410d565efSmrg 	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
418510d565efSmrg 	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
418610d565efSmrg 	      && ANY_RETURN_P (JUMP_LABEL (insn)))
418710d565efSmrg 	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
418810d565efSmrg           maybe_copy_prologue_epilogue_insn (insn, copy);
418910d565efSmrg 	  break;
419010d565efSmrg 
419110d565efSmrg 	case JUMP_TABLE_DATA:
419210d565efSmrg 	  /* Avoid copying of dispatch tables.  We never duplicate
419310d565efSmrg 	     tablejumps, so this can be hit only if the table got
419410d565efSmrg 	     moved far from the original jump.
419510d565efSmrg 	     Avoid copying the following barrier as well, if any
419610d565efSmrg 	     (and any debug insns in between).  */
419710d565efSmrg 	  for (next = NEXT_INSN (insn);
419810d565efSmrg 	       next != NEXT_INSN (to);
419910d565efSmrg 	       next = NEXT_INSN (next))
420010d565efSmrg 	    if (!DEBUG_INSN_P (next))
420110d565efSmrg 	      break;
420210d565efSmrg 	  if (next != NEXT_INSN (to) && BARRIER_P (next))
420310d565efSmrg 	    insn = next;
420410d565efSmrg 	  break;
420510d565efSmrg 
420610d565efSmrg 	case CODE_LABEL:
420710d565efSmrg 	  break;
420810d565efSmrg 
420910d565efSmrg 	case BARRIER:
421010d565efSmrg 	  emit_barrier ();
421110d565efSmrg 	  break;
421210d565efSmrg 
421310d565efSmrg 	case NOTE:
421410d565efSmrg 	  switch (NOTE_KIND (insn))
421510d565efSmrg 	    {
421610d565efSmrg 	      /* In case the prologue is empty and the function contains a label
421710d565efSmrg 		 in the first BB, we may want to copy the block.  */
421810d565efSmrg 	    case NOTE_INSN_PROLOGUE_END:
421910d565efSmrg 
422010d565efSmrg 	    case NOTE_INSN_DELETED:
422110d565efSmrg 	    case NOTE_INSN_DELETED_LABEL:
422210d565efSmrg 	    case NOTE_INSN_DELETED_DEBUG_LABEL:
422310d565efSmrg 	      /* No problem to strip these.  */
422410d565efSmrg 	    case NOTE_INSN_FUNCTION_BEG:
422510d565efSmrg 	      /* There is always just a single entry to the function.  */
422610d565efSmrg 	    case NOTE_INSN_BASIC_BLOCK:
422710d565efSmrg               /* We should only switch text sections once.  */
422810d565efSmrg 	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
422910d565efSmrg 	      break;
423010d565efSmrg 
423110d565efSmrg 	    case NOTE_INSN_EPILOGUE_BEG:
423210d565efSmrg 	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
423310d565efSmrg 	      emit_note_copy (as_a <rtx_note *> (insn));
423410d565efSmrg 	      break;
423510d565efSmrg 
423610d565efSmrg 	    default:
423710d565efSmrg 	      /* All other notes should have already been eliminated.  */
423810d565efSmrg 	      gcc_unreachable ();
423910d565efSmrg 	    }
424010d565efSmrg 	  break;
424110d565efSmrg 	default:
424210d565efSmrg 	  gcc_unreachable ();
424310d565efSmrg 	}
424410d565efSmrg     }
424510d565efSmrg   insn = NEXT_INSN (last);
424610d565efSmrg   delete_insn (last);
424710d565efSmrg   return insn;
424810d565efSmrg }
424910d565efSmrg 
425010d565efSmrg /* Create a duplicate of the basic block BB.  */
425110d565efSmrg 
425210d565efSmrg static basic_block
425321ff1670Smrg cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *)
425410d565efSmrg {
425510d565efSmrg   rtx_insn *insn;
425610d565efSmrg   basic_block new_bb;
425710d565efSmrg 
425810d565efSmrg   insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
425910d565efSmrg   new_bb = create_basic_block (insn,
426010d565efSmrg 			       insn ? get_last_insn () : NULL,
426110d565efSmrg 			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
426210d565efSmrg 
426310d565efSmrg   BB_COPY_PARTITION (new_bb, bb);
426410d565efSmrg   if (BB_HEADER (bb))
426510d565efSmrg     {
426610d565efSmrg       insn = BB_HEADER (bb);
426710d565efSmrg       while (NEXT_INSN (insn))
426810d565efSmrg 	insn = NEXT_INSN (insn);
426910d565efSmrg       insn = duplicate_insn_chain (BB_HEADER (bb), insn);
427010d565efSmrg       if (insn)
427110d565efSmrg 	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
427210d565efSmrg     }
427310d565efSmrg 
427410d565efSmrg   if (BB_FOOTER (bb))
427510d565efSmrg     {
427610d565efSmrg       insn = BB_FOOTER (bb);
427710d565efSmrg       while (NEXT_INSN (insn))
427810d565efSmrg 	insn = NEXT_INSN (insn);
427910d565efSmrg       insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
428010d565efSmrg       if (insn)
428110d565efSmrg 	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
428210d565efSmrg     }
428310d565efSmrg 
428410d565efSmrg   return new_bb;
428510d565efSmrg }
428610d565efSmrg 
428710d565efSmrg 
428810d565efSmrg /* Main entry point to this module - initialize the data structures for
428910d565efSmrg    CFG layout changes.  It keeps LOOPS up-to-date if not null.
429010d565efSmrg 
429110d565efSmrg    FLAGS is a set of additional flags to pass to cleanup_cfg().  */
429210d565efSmrg 
429310d565efSmrg void
4294c7a68eb7Smrg cfg_layout_initialize (int flags)
429510d565efSmrg {
429610d565efSmrg   rtx_insn_list *x;
429710d565efSmrg   basic_block bb;
429810d565efSmrg 
429910d565efSmrg   /* Once bb partitioning is complete, cfg layout mode should not be
430010d565efSmrg      re-entered.  Entering cfg layout mode may require fixups.  For
430110d565efSmrg      example, edge forwarding performed when optimizing the cfg
430210d565efSmrg      layout might require moving a block from the hot to the cold
430310d565efSmrg      section; this would create an illegal partitioning unless some
430410d565efSmrg      manual fixup was performed.  */
4305c7a68eb7Smrg   gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
430610d565efSmrg 
430710d565efSmrg   initialize_original_copy_tables ();
430810d565efSmrg 
430910d565efSmrg   cfg_layout_rtl_register_cfg_hooks ();
431010d565efSmrg 
431110d565efSmrg   record_effective_endpoints ();
431210d565efSmrg 
431310d565efSmrg   /* Make sure that the targets of non local gotos are marked.  */
431410d565efSmrg   for (x = nonlocal_goto_handler_labels; x; x = x->next ())
431510d565efSmrg     {
431610d565efSmrg       bb = BLOCK_FOR_INSN (x->insn ());
431710d565efSmrg       bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
431810d565efSmrg     }
431910d565efSmrg 
432010d565efSmrg   cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
432110d565efSmrg }
432210d565efSmrg 
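/* A minimal usage sketch, assuming a pass that wants to reorder or
   duplicate blocks: the cfglayout session is bracketed by this
   function and cfg_layout_finalize, with the pass recording its
   desired block order in the bb->aux chain in between:

     cfg_layout_initialize (0);
     ... transform the CFG, chaining the blocks via bb->aux ...
     cfg_layout_finalize ();

   This is the pattern used, for instance, by the block reordering
   pass.  */
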
432310d565efSmrg /* Splits superblocks.  */
432410d565efSmrg void
432510d565efSmrg break_superblocks (void)
432610d565efSmrg {
432710d565efSmrg   bool need = false;
432810d565efSmrg   basic_block bb;
432910d565efSmrg 
433010d565efSmrg   auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
433110d565efSmrg   bitmap_clear (superblocks);
433210d565efSmrg 
433310d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
433410d565efSmrg     if (bb->flags & BB_SUPERBLOCK)
433510d565efSmrg       {
433610d565efSmrg 	bb->flags &= ~BB_SUPERBLOCK;
433710d565efSmrg 	bitmap_set_bit (superblocks, bb->index);
433810d565efSmrg 	need = true;
433910d565efSmrg       }
434010d565efSmrg 
434110d565efSmrg   if (need)
434210d565efSmrg     {
434310d565efSmrg       rebuild_jump_labels (get_insns ());
434410d565efSmrg       find_many_sub_basic_blocks (superblocks);
434510d565efSmrg     }
434610d565efSmrg }
434710d565efSmrg 
434810d565efSmrg /* Finalize the changes: reorder insn list according to the sequence specified
434910d565efSmrg    by aux pointers, enter compensation code, rebuild scope forest.  */
435010d565efSmrg 
435110d565efSmrg void
435210d565efSmrg cfg_layout_finalize (void)
435310d565efSmrg {
435410d565efSmrg   free_dominance_info (CDI_DOMINATORS);
435510d565efSmrg   force_one_exit_fallthru ();
435610d565efSmrg   rtl_register_cfg_hooks ();
435710d565efSmrg   if (reload_completed && !targetm.have_epilogue ())
435810d565efSmrg     fixup_fallthru_exit_predecessor ();
435910d565efSmrg   fixup_reorder_chain ();
436010d565efSmrg 
436110d565efSmrg   rebuild_jump_labels (get_insns ());
436210d565efSmrg   delete_dead_jumptables ();
436310d565efSmrg 
436410d565efSmrg   if (flag_checking)
436510d565efSmrg     verify_insn_chain ();
436610d565efSmrg   checking_verify_flow_info ();
436710d565efSmrg }
436810d565efSmrg 
436910d565efSmrg 
437010d565efSmrg /* Same as split_block but update cfg_layout structures.  */
437110d565efSmrg 
437210d565efSmrg static basic_block
437310d565efSmrg cfg_layout_split_block (basic_block bb, void *insnp)
437410d565efSmrg {
437510d565efSmrg   rtx insn = (rtx) insnp;
437610d565efSmrg   basic_block new_bb = rtl_split_block (bb, insn);
437710d565efSmrg 
437810d565efSmrg   BB_FOOTER (new_bb) = BB_FOOTER (bb);
437910d565efSmrg   BB_FOOTER (bb) = NULL;
438010d565efSmrg 
438110d565efSmrg   return new_bb;
438210d565efSmrg }
438310d565efSmrg 
438410d565efSmrg /* Redirect edge E to DEST.  */
438510d565efSmrg static edge
438610d565efSmrg cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
438710d565efSmrg {
438810d565efSmrg   basic_block src = e->src;
438910d565efSmrg   edge ret;
439010d565efSmrg 
439110d565efSmrg   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
439210d565efSmrg     return NULL;
439310d565efSmrg 
439410d565efSmrg   if (e->dest == dest)
439510d565efSmrg     return e;
439610d565efSmrg 
4397c7a68eb7Smrg   if (e->flags & EDGE_CROSSING
4398c7a68eb7Smrg       && BB_PARTITION (e->src) == BB_PARTITION (dest)
4399c7a68eb7Smrg       && simplejump_p (BB_END (src)))
4400c7a68eb7Smrg     {
4401c7a68eb7Smrg       if (dump_file)
4402c7a68eb7Smrg 	fprintf (dump_file,
4403c7a68eb7Smrg 		 "Removing crossing jump while redirecting edge from %i to %i\n",
4404c7a68eb7Smrg 		 e->src->index, dest->index);
4405c7a68eb7Smrg       delete_insn (BB_END (src));
4406c7a68eb7Smrg       remove_barriers_from_footer (src);
4407c7a68eb7Smrg       e->flags |= EDGE_FALLTHRU;
4408c7a68eb7Smrg     }
4409c7a68eb7Smrg 
441010d565efSmrg   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
441110d565efSmrg       && (ret = try_redirect_by_replacing_jump (e, dest, true)))
441210d565efSmrg     {
441310d565efSmrg       df_set_bb_dirty (src);
441410d565efSmrg       return ret;
441510d565efSmrg     }
441610d565efSmrg 
441710d565efSmrg   if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
441810d565efSmrg       && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
441910d565efSmrg     {
442010d565efSmrg       if (dump_file)
442110d565efSmrg 	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
442210d565efSmrg 		 e->src->index, dest->index);
442310d565efSmrg 
442410d565efSmrg       df_set_bb_dirty (e->src);
442510d565efSmrg       redirect_edge_succ (e, dest);
442610d565efSmrg       return e;
442710d565efSmrg     }
442810d565efSmrg 
442910d565efSmrg   /* Redirect_edge_and_branch may decide to turn the branch into a fallthru
443010d565efSmrg      edge when the basic blocks appear to be in sequence.  Avoid this
443110d565efSmrg      transformation.  */
443210d565efSmrg 
443310d565efSmrg   if (e->flags & EDGE_FALLTHRU)
443410d565efSmrg     {
443510d565efSmrg       /* Redirect any branch edges unified with the fallthru one.  */
443610d565efSmrg       if (JUMP_P (BB_END (src))
443710d565efSmrg 	  && label_is_jump_target_p (BB_HEAD (e->dest),
443810d565efSmrg 				     BB_END (src)))
443910d565efSmrg 	{
444010d565efSmrg 	  edge redirected;
444110d565efSmrg 
444210d565efSmrg 	  if (dump_file)
444310d565efSmrg 	    fprintf (dump_file, "Fallthru edge unified with branch "
444410d565efSmrg 		     "%i->%i redirected to %i\n",
444510d565efSmrg 		     e->src->index, e->dest->index, dest->index);
444610d565efSmrg 	  e->flags &= ~EDGE_FALLTHRU;
444710d565efSmrg 	  redirected = redirect_branch_edge (e, dest);
444810d565efSmrg 	  gcc_assert (redirected);
444910d565efSmrg 	  redirected->flags |= EDGE_FALLTHRU;
445010d565efSmrg 	  df_set_bb_dirty (redirected->src);
445110d565efSmrg 	  return redirected;
445210d565efSmrg 	}
445310d565efSmrg       /* In case we are redirecting the fallthru edge to the branch edge
445410d565efSmrg 	 of a conditional jump, remove it.  */
445510d565efSmrg       if (EDGE_COUNT (src->succs) == 2)
445610d565efSmrg 	{
445710d565efSmrg 	  /* Find the edge that is different from E.  */
445810d565efSmrg 	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
445910d565efSmrg 
446010d565efSmrg 	  if (s->dest == dest
446110d565efSmrg 	      && any_condjump_p (BB_END (src))
446210d565efSmrg 	      && onlyjump_p (BB_END (src)))
446310d565efSmrg 	    delete_insn (BB_END (src));
446410d565efSmrg 	}
446510d565efSmrg       if (dump_file)
446610d565efSmrg 	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
446710d565efSmrg 		 e->src->index, e->dest->index, dest->index);
446810d565efSmrg       ret = redirect_edge_succ_nodup (e, dest);
446910d565efSmrg     }
447010d565efSmrg   else
447110d565efSmrg     ret = redirect_branch_edge (e, dest);
447210d565efSmrg 
4473c7a68eb7Smrg   if (!ret)
4474c7a68eb7Smrg     return NULL;
4475c7a68eb7Smrg 
4476c7a68eb7Smrg   fixup_partition_crossing (ret);
447710d565efSmrg   /* We don't want simplejumps in the insn stream during cfglayout.  */
4478c7a68eb7Smrg   gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
447910d565efSmrg 
448010d565efSmrg   df_set_bb_dirty (src);
448110d565efSmrg   return ret;
448210d565efSmrg }
448310d565efSmrg 
448410d565efSmrg /* Simple wrapper as we can always redirect fallthru edges.  */
448510d565efSmrg static basic_block
448610d565efSmrg cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
448710d565efSmrg {
448810d565efSmrg   edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
448910d565efSmrg 
449010d565efSmrg   gcc_assert (redirected);
449110d565efSmrg   return NULL;
449210d565efSmrg }
449310d565efSmrg 
449410d565efSmrg /* Same as delete_basic_block but update cfg_layout structures.  */
449510d565efSmrg 
449610d565efSmrg static void
449710d565efSmrg cfg_layout_delete_block (basic_block bb)
449810d565efSmrg {
449910d565efSmrg   rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
450010d565efSmrg   rtx_insn **to;
450110d565efSmrg 
450210d565efSmrg   if (BB_HEADER (bb))
450310d565efSmrg     {
450410d565efSmrg       next = BB_HEAD (bb);
450510d565efSmrg       if (prev)
450610d565efSmrg 	SET_NEXT_INSN (prev) = BB_HEADER (bb);
450710d565efSmrg       else
450810d565efSmrg 	set_first_insn (BB_HEADER (bb));
450910d565efSmrg       SET_PREV_INSN (BB_HEADER (bb)) = prev;
451010d565efSmrg       insn = BB_HEADER (bb);
451110d565efSmrg       while (NEXT_INSN (insn))
451210d565efSmrg 	insn = NEXT_INSN (insn);
451310d565efSmrg       SET_NEXT_INSN (insn) = next;
451410d565efSmrg       SET_PREV_INSN (next) = insn;
451510d565efSmrg     }
451610d565efSmrg   next = NEXT_INSN (BB_END (bb));
451710d565efSmrg   if (BB_FOOTER (bb))
451810d565efSmrg     {
451910d565efSmrg       insn = BB_FOOTER (bb);
452010d565efSmrg       while (insn)
452110d565efSmrg 	{
452210d565efSmrg 	  if (BARRIER_P (insn))
452310d565efSmrg 	    {
452410d565efSmrg 	      if (PREV_INSN (insn))
452510d565efSmrg 		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
452610d565efSmrg 	      else
452710d565efSmrg 		BB_FOOTER (bb) = NEXT_INSN (insn);
452810d565efSmrg 	      if (NEXT_INSN (insn))
452910d565efSmrg 		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
453010d565efSmrg 	    }
453110d565efSmrg 	  if (LABEL_P (insn))
453210d565efSmrg 	    break;
453310d565efSmrg 	  insn = NEXT_INSN (insn);
453410d565efSmrg 	}
453510d565efSmrg       if (BB_FOOTER (bb))
453610d565efSmrg 	{
453710d565efSmrg 	  insn = BB_END (bb);
453810d565efSmrg 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
453910d565efSmrg 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
454010d565efSmrg 	  while (NEXT_INSN (insn))
454110d565efSmrg 	    insn = NEXT_INSN (insn);
454210d565efSmrg 	  SET_NEXT_INSN (insn) = next;
454310d565efSmrg 	  if (next)
454410d565efSmrg 	    SET_PREV_INSN (next) = insn;
454510d565efSmrg 	  else
454610d565efSmrg 	    set_last_insn (insn);
454710d565efSmrg 	}
454810d565efSmrg     }
454910d565efSmrg   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
455010d565efSmrg     to = &BB_HEADER (bb->next_bb);
455110d565efSmrg   else
455210d565efSmrg     to = &cfg_layout_function_footer;
455310d565efSmrg 
455410d565efSmrg   rtl_delete_block (bb);
455510d565efSmrg 
455610d565efSmrg   if (prev)
455710d565efSmrg     prev = NEXT_INSN (prev);
455810d565efSmrg   else
455910d565efSmrg     prev = get_insns ();
456010d565efSmrg   if (next)
456110d565efSmrg     next = PREV_INSN (next);
456210d565efSmrg   else
456310d565efSmrg     next = get_last_insn ();
456410d565efSmrg 
456510d565efSmrg   if (next && NEXT_INSN (next) != prev)
456610d565efSmrg     {
456710d565efSmrg       remaints = unlink_insn_chain (prev, next);
456810d565efSmrg       insn = remaints;
456910d565efSmrg       while (NEXT_INSN (insn))
457010d565efSmrg 	insn = NEXT_INSN (insn);
457110d565efSmrg       SET_NEXT_INSN (insn) = *to;
457210d565efSmrg       if (*to)
457310d565efSmrg 	SET_PREV_INSN (*to) = insn;
457410d565efSmrg       *to = remaints;
457510d565efSmrg     }
457610d565efSmrg }
457710d565efSmrg 
457810d565efSmrg /* Return true when blocks A and B can be safely merged.  */
457910d565efSmrg 
458010d565efSmrg static bool
458110d565efSmrg cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
458210d565efSmrg {
458310d565efSmrg   /* If we are partitioning hot/cold basic blocks, we don't want to
458410d565efSmrg      mess up unconditional or indirect jumps that cross between hot
458510d565efSmrg      and cold sections.
458610d565efSmrg 
458710d565efSmrg      Basic block partitioning may result in some jumps that appear to
458810d565efSmrg      be optimizable (or blocks that appear to be mergeable), but which really
458910d565efSmrg      must be left untouched (they are required to make it safely across
459010d565efSmrg      partition boundaries).  See  the comments at the top of
459110d565efSmrg      partition boundaries).  See the comments at the top of
459210d565efSmrg 
459310d565efSmrg   if (BB_PARTITION (a) != BB_PARTITION (b))
459410d565efSmrg     return false;
459510d565efSmrg 
459610d565efSmrg   /* Protect the loop latches.  */
459710d565efSmrg   if (current_loops && b->loop_father->latch == b)
459810d565efSmrg     return false;
459910d565efSmrg 
460010d565efSmrg   /* If we would end up moving B's instructions, make sure it doesn't fall
460110d565efSmrg      through into the exit block, since we cannot recover from a fallthrough
460210d565efSmrg      edge into the exit block occurring in the middle of a function.  */
460310d565efSmrg   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
460410d565efSmrg     {
460510d565efSmrg       edge e = find_fallthru_edge (b->succs);
460610d565efSmrg       if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
460710d565efSmrg 	return false;
460810d565efSmrg     }
460910d565efSmrg 
461010d565efSmrg   /* There must be exactly one edge in between the blocks.  */
461110d565efSmrg   return (single_succ_p (a)
461210d565efSmrg 	  && single_succ (a) == b
461310d565efSmrg 	  && single_pred_p (b)
461410d565efSmrg 	  && a != b
461510d565efSmrg 	  /* Must be simple edge.  */
461610d565efSmrg 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
461710d565efSmrg 	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
461810d565efSmrg 	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
461910d565efSmrg 	  /* If the jump insn has side effects, we can't kill the edge.
462010d565efSmrg 	     When not optimizing, try_redirect_by_replacing_jump will
462110d565efSmrg 	     not allow us to redirect an edge by replacing a table jump.  */
462210d565efSmrg 	  && (!JUMP_P (BB_END (a))
462310d565efSmrg 	      || ((!optimize || reload_completed)
462410d565efSmrg 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
462510d565efSmrg }
462610d565efSmrg 
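/* Hedged usage sketch: cleanup code typically pairs the predicate
   above with the merge operation through the generic hooks, e.g.

     if (can_merge_blocks_p (a, b))
       merge_blocks (a, b);

   where can_merge_blocks_p and merge_blocks are the cfghooks.h
   wrappers that dispatch to cfg_layout_can_merge_blocks_p and
   cfg_layout_merge_blocks while in cfglayout mode.  */
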
462710d565efSmrg /* Merge block A and B.  The blocks must be mergeable.  */
462810d565efSmrg 
462910d565efSmrg static void
463010d565efSmrg cfg_layout_merge_blocks (basic_block a, basic_block b)
463110d565efSmrg {
4632*0fc04c29Smrg   /* If B is a forwarder block whose outgoing edge has no location, we'll
4633*0fc04c29Smrg      propagate the locus of the edge between A and B onto it.  */
4634*0fc04c29Smrg   const bool forward_edge_locus
4635*0fc04c29Smrg     = (b->flags & BB_FORWARDER_BLOCK) != 0
4636*0fc04c29Smrg       && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
463710d565efSmrg   rtx_insn *insn;
463810d565efSmrg 
463910d565efSmrg   gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
464010d565efSmrg 
464110d565efSmrg   if (dump_file)
464210d565efSmrg     fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
464310d565efSmrg 			 a->index);
464410d565efSmrg 
464510d565efSmrg   /* If there was a CODE_LABEL beginning B, delete it.  */
464610d565efSmrg   if (LABEL_P (BB_HEAD (b)))
464710d565efSmrg     {
464810d565efSmrg       delete_insn (BB_HEAD (b));
464910d565efSmrg     }
465010d565efSmrg 
465110d565efSmrg   /* We should have a fallthru edge in A, or we can do a dummy redirection
465210d565efSmrg      to get it cleaned up.  */
465310d565efSmrg   if (JUMP_P (BB_END (a)))
465410d565efSmrg     try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
465510d565efSmrg   gcc_assert (!JUMP_P (BB_END (a)));
465610d565efSmrg 
4657*0fc04c29Smrg   /* If not optimizing, preserve the locus of the single edge between
4658*0fc04c29Smrg      blocks A and B if necessary by emitting a nop.  */
4659*0fc04c29Smrg   if (!optimize
4660*0fc04c29Smrg       && !forward_edge_locus
4661*0fc04c29Smrg       && !DECL_IGNORED_P (current_function_decl))
466210d565efSmrg     emit_nop_for_unique_locus_between (a, b);
466310d565efSmrg 
466410d565efSmrg   /* Move things from b->footer after a->footer.  */
466510d565efSmrg   if (BB_FOOTER (b))
466610d565efSmrg     {
466710d565efSmrg       if (!BB_FOOTER (a))
466810d565efSmrg 	BB_FOOTER (a) = BB_FOOTER (b);
466910d565efSmrg       else
467010d565efSmrg 	{
467110d565efSmrg 	  rtx_insn *last = BB_FOOTER (a);
467210d565efSmrg 
467310d565efSmrg 	  while (NEXT_INSN (last))
467410d565efSmrg 	    last = NEXT_INSN (last);
467510d565efSmrg 	  SET_NEXT_INSN (last) = BB_FOOTER (b);
467610d565efSmrg 	  SET_PREV_INSN (BB_FOOTER (b)) = last;
467710d565efSmrg 	}
467810d565efSmrg       BB_FOOTER (b) = NULL;
467910d565efSmrg     }
468010d565efSmrg 
468110d565efSmrg   /* Move things from b->header before a->footer.
468210d565efSmrg      Note that this may include dead tablejump data, but we don't clean
468310d565efSmrg      those up until we go out of cfglayout mode.  */
468410d565efSmrg    if (BB_HEADER (b))
468510d565efSmrg      {
468610d565efSmrg       if (! BB_FOOTER (a))
468710d565efSmrg 	BB_FOOTER (a) = BB_HEADER (b);
468810d565efSmrg       else
468910d565efSmrg 	{
469010d565efSmrg 	  rtx_insn *last = BB_HEADER (b);
469110d565efSmrg 
469210d565efSmrg 	  while (NEXT_INSN (last))
469310d565efSmrg 	    last = NEXT_INSN (last);
469410d565efSmrg 	  SET_NEXT_INSN (last) = BB_FOOTER (a);
469510d565efSmrg 	  SET_PREV_INSN (BB_FOOTER (a)) = last;
469610d565efSmrg 	  BB_FOOTER (a) = BB_HEADER (b);
469710d565efSmrg 	}
469810d565efSmrg       BB_HEADER (b) = NULL;
469910d565efSmrg     }
470010d565efSmrg 
470110d565efSmrg   /* In case the basic blocks are not adjacent, move them around.  */
470210d565efSmrg   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
470310d565efSmrg     {
470410d565efSmrg       insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
470510d565efSmrg 
470610d565efSmrg       emit_insn_after_noloc (insn, BB_END (a), a);
470710d565efSmrg     }
470810d565efSmrg   /* Otherwise just re-associate the instructions.  */
470910d565efSmrg   else
471010d565efSmrg     {
471110d565efSmrg       insn = BB_HEAD (b);
471210d565efSmrg       BB_END (a) = BB_END (b);
471310d565efSmrg     }
471410d565efSmrg 
471510d565efSmrg   /* emit_insn_after_noloc doesn't call df_insn_change_bb.
471610d565efSmrg      We need to call it explicitly.  */
471710d565efSmrg   update_bb_for_insn_chain (insn, BB_END (b), a);
471810d565efSmrg 
471910d565efSmrg   /* Skip possible DELETED_LABEL insn.  */
472010d565efSmrg   if (!NOTE_INSN_BASIC_BLOCK_P (insn))
472110d565efSmrg     insn = NEXT_INSN (insn);
472210d565efSmrg   gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
472310d565efSmrg   BB_HEAD (b) = BB_END (b) = NULL;
472410d565efSmrg   delete_insn (insn);
472510d565efSmrg 
472610d565efSmrg   df_bb_delete (b->index);
472710d565efSmrg 
4728*0fc04c29Smrg   if (forward_edge_locus)
472910d565efSmrg     EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
473010d565efSmrg 
473110d565efSmrg   if (dump_file)
473210d565efSmrg     fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
473310d565efSmrg }
473410d565efSmrg 
473510d565efSmrg /* Split edge E.  */
473610d565efSmrg 
473710d565efSmrg static basic_block
473810d565efSmrg cfg_layout_split_edge (edge e)
473910d565efSmrg {
474010d565efSmrg   basic_block new_bb =
474110d565efSmrg     create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
474210d565efSmrg 			? NEXT_INSN (BB_END (e->src)) : get_insns (),
474310d565efSmrg 			NULL_RTX, e->src);
474410d565efSmrg 
474510d565efSmrg   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
474610d565efSmrg     BB_COPY_PARTITION (new_bb, e->src);
474710d565efSmrg   else
474810d565efSmrg     BB_COPY_PARTITION (new_bb, e->dest);
474910d565efSmrg   make_edge (new_bb, e->dest, EDGE_FALLTHRU);
475010d565efSmrg   redirect_edge_and_branch_force (e, new_bb);
475110d565efSmrg 
475210d565efSmrg   return new_bb;
475310d565efSmrg }
475410d565efSmrg 
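/* Design note: the new block must land in the same hot/cold partition
   as its surroundings, which is why its partition is copied from
   E->dest, or from E->src when the destination is the exit block.
   Schematically, a caller placing code on an edge could do

     basic_block nb = split_edge (e);
     ... emit insns into NB ...

   although for inserting insns on an edge the insert_insn_on_edge and
   commit_edge_insertions pair used later in this file is the usual
   interface.  */
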
475510d565efSmrg /* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */
475610d565efSmrg 
475710d565efSmrg static void
475810d565efSmrg rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
475910d565efSmrg {
476010d565efSmrg }
476110d565efSmrg 
476210d565efSmrg /* Return true if BB contains only labels or non-executable
476310d565efSmrg    instructions.  */
476410d565efSmrg 
476510d565efSmrg static bool
476610d565efSmrg rtl_block_empty_p (basic_block bb)
476710d565efSmrg {
476810d565efSmrg   rtx_insn *insn;
476910d565efSmrg 
477010d565efSmrg   if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
477110d565efSmrg       || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
477210d565efSmrg     return true;
477310d565efSmrg 
477410d565efSmrg   FOR_BB_INSNS (bb, insn)
477510d565efSmrg     if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
477610d565efSmrg       return false;
477710d565efSmrg 
477810d565efSmrg   return true;
477910d565efSmrg }
478010d565efSmrg 
478110d565efSmrg /* Split a basic block if it ends with a conditional branch and if
478210d565efSmrg    the other part of the block is not empty.  */
478310d565efSmrg 
478410d565efSmrg static basic_block
478510d565efSmrg rtl_split_block_before_cond_jump (basic_block bb)
478610d565efSmrg {
478710d565efSmrg   rtx_insn *insn;
478810d565efSmrg   rtx_insn *split_point = NULL;
478910d565efSmrg   rtx_insn *last = NULL;
479010d565efSmrg   bool found_code = false;
479110d565efSmrg 
479210d565efSmrg   FOR_BB_INSNS (bb, insn)
479310d565efSmrg     {
479410d565efSmrg       if (any_condjump_p (insn))
479510d565efSmrg 	split_point = last;
479610d565efSmrg       else if (NONDEBUG_INSN_P (insn))
479710d565efSmrg 	found_code = true;
479810d565efSmrg       last = insn;
479910d565efSmrg     }
480010d565efSmrg 
480110d565efSmrg   /* Split only if we found both a conditional jump and some other real code.  */
480210d565efSmrg   if (found_code && split_point)
480310d565efSmrg     return split_block (bb, split_point)->dest;
480410d565efSmrg   else
480510d565efSmrg     return NULL;
480610d565efSmrg }
480710d565efSmrg 
480810d565efSmrg /* Return 1 if BB ends with a call, possibly followed by some
480910d565efSmrg    instructions that must stay with the call, 0 otherwise.  */
481010d565efSmrg 
481110d565efSmrg static bool
481210d565efSmrg rtl_block_ends_with_call_p (basic_block bb)
481310d565efSmrg {
481410d565efSmrg   rtx_insn *insn = BB_END (bb);
481510d565efSmrg 
481610d565efSmrg   while (!CALL_P (insn)
481710d565efSmrg 	 && insn != BB_HEAD (bb)
481810d565efSmrg 	 && (keep_with_call_p (insn)
481910d565efSmrg 	     || NOTE_P (insn)
482010d565efSmrg 	     || DEBUG_INSN_P (insn)))
482110d565efSmrg     insn = PREV_INSN (insn);
482210d565efSmrg   return (CALL_P (insn));
482310d565efSmrg }
482410d565efSmrg 
482510d565efSmrg /* Return 1 if BB ends with a conditional branch, 0 otherwise.  */
482610d565efSmrg 
482710d565efSmrg static bool
482810d565efSmrg rtl_block_ends_with_condjump_p (const_basic_block bb)
482910d565efSmrg {
483010d565efSmrg   return any_condjump_p (BB_END (bb));
483110d565efSmrg }
483210d565efSmrg 
483310d565efSmrg /* Return true if we need to add a fake edge to the exit block.
483410d565efSmrg    Helper function for rtl_flow_call_edges_add.  */
483510d565efSmrg 
483610d565efSmrg static bool
483710d565efSmrg need_fake_edge_p (const rtx_insn *insn)
483810d565efSmrg {
483910d565efSmrg   if (!INSN_P (insn))
484010d565efSmrg     return false;
484110d565efSmrg 
484210d565efSmrg   if ((CALL_P (insn)
484310d565efSmrg        && !SIBLING_CALL_P (insn)
484410d565efSmrg        && !find_reg_note (insn, REG_NORETURN, NULL)
484510d565efSmrg        && !(RTL_CONST_OR_PURE_CALL_P (insn))))
484610d565efSmrg     return true;
484710d565efSmrg 
484810d565efSmrg   return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
484910d565efSmrg 	   && MEM_VOLATILE_P (PATTERN (insn)))
485010d565efSmrg 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
485110d565efSmrg 	      && asm_noperands (insn) != -1
485210d565efSmrg 	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
485310d565efSmrg 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
485410d565efSmrg }
485510d565efSmrg 
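/* Two illustrative source-level cases (assumptions for exposition,
   not taken from this file): a plain call such as

     foo ();                          (it may exit or longjmp)

   and a volatile asm such as

     asm volatile ("" ::: "memory");

   both satisfy the predicate above, because entering their block does
   not guarantee that control reaches the end of that block.  */
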
485610d565efSmrg /* Add fake edges to the function exit for any non-constant and non-noreturn
485710d565efSmrg    calls, or volatile inline assembly, in the bitmap of blocks specified by
485810d565efSmrg    BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
485910d565efSmrg    that were split.
486010d565efSmrg 
486110d565efSmrg    The goal is to expose cases in which entering a basic block does not imply
486210d565efSmrg    that all subsequent instructions must be executed.  */
486310d565efSmrg 
486410d565efSmrg static int
486510d565efSmrg rtl_flow_call_edges_add (sbitmap blocks)
486610d565efSmrg {
486710d565efSmrg   int i;
486810d565efSmrg   int blocks_split = 0;
486910d565efSmrg   int last_bb = last_basic_block_for_fn (cfun);
487010d565efSmrg   bool check_last_block = false;
487110d565efSmrg 
487210d565efSmrg   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
487310d565efSmrg     return 0;
487410d565efSmrg 
487510d565efSmrg   if (! blocks)
487610d565efSmrg     check_last_block = true;
487710d565efSmrg   else
487810d565efSmrg     check_last_block = bitmap_bit_p (blocks,
487910d565efSmrg 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
488010d565efSmrg 
488110d565efSmrg   /* In the last basic block, before epilogue generation, there will be
488210d565efSmrg      a fallthru edge to EXIT.  Special care is required if the last insn
488310d565efSmrg      of the last basic block is a call because make_edge folds duplicate
488410d565efSmrg      edges, which would result in the fallthru edge also being marked
488510d565efSmrg      fake, which would result in the fallthru edge being removed by
488610d565efSmrg      remove_fake_edges, which would result in an invalid CFG.
488710d565efSmrg 
488810d565efSmrg      Moreover, we can't elide the outgoing fake edge, since the block
488910d565efSmrg      profiler needs to take this into account in order to solve the minimal
489010d565efSmrg      spanning tree in the case that the call doesn't return.
489110d565efSmrg 
489210d565efSmrg      Handle this by adding a dummy instruction in a new last basic block.  */
489310d565efSmrg   if (check_last_block)
489410d565efSmrg     {
489510d565efSmrg       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
489610d565efSmrg       rtx_insn *insn = BB_END (bb);
489710d565efSmrg 
489810d565efSmrg       /* Back up past insns that must be kept in the same block as a call.  */
489910d565efSmrg       while (insn != BB_HEAD (bb)
490010d565efSmrg 	     && keep_with_call_p (insn))
490110d565efSmrg 	insn = PREV_INSN (insn);
490210d565efSmrg 
490310d565efSmrg       if (need_fake_edge_p (insn))
490410d565efSmrg 	{
490510d565efSmrg 	  edge e;
490610d565efSmrg 
490710d565efSmrg 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
490810d565efSmrg 	  if (e)
490910d565efSmrg 	    {
491010d565efSmrg 	      insert_insn_on_edge (gen_use (const0_rtx), e);
491110d565efSmrg 	      commit_edge_insertions ();
491210d565efSmrg 	    }
491310d565efSmrg 	}
491410d565efSmrg     }
491510d565efSmrg 
491610d565efSmrg   /* Now add fake edges to the function exit for any non-constant
491710d565efSmrg      calls since there is no way that we can determine if they will
491810d565efSmrg      return or not...  */
491910d565efSmrg 
492010d565efSmrg   for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
492110d565efSmrg     {
492210d565efSmrg       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
492310d565efSmrg       rtx_insn *insn;
492410d565efSmrg       rtx_insn *prev_insn;
492510d565efSmrg 
492610d565efSmrg       if (!bb)
492710d565efSmrg 	continue;
492810d565efSmrg 
492910d565efSmrg       if (blocks && !bitmap_bit_p (blocks, i))
493010d565efSmrg 	continue;
493110d565efSmrg 
493210d565efSmrg       for (insn = BB_END (bb); ; insn = prev_insn)
493310d565efSmrg 	{
493410d565efSmrg 	  prev_insn = PREV_INSN (insn);
493510d565efSmrg 	  if (need_fake_edge_p (insn))
493610d565efSmrg 	    {
493710d565efSmrg 	      edge e;
493810d565efSmrg 	      rtx_insn *split_at_insn = insn;
493910d565efSmrg 
494010d565efSmrg 	      /* Don't split the block between a call and an insn that should
494110d565efSmrg 		 remain in the same block as the call.  */
494210d565efSmrg 	      if (CALL_P (insn))
494310d565efSmrg 		while (split_at_insn != BB_END (bb)
494410d565efSmrg 		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
494510d565efSmrg 		  split_at_insn = NEXT_INSN (split_at_insn);
494610d565efSmrg 
494710d565efSmrg 	      /* The handling above of the final block before the epilogue
494810d565efSmrg 		 should be enough to verify that there is no edge to the exit
494910d565efSmrg 		 block in CFG already.  Calling make_edge in such case would
495010d565efSmrg 		 cause us to mark that edge as fake and remove it later.  */
495110d565efSmrg 
495210d565efSmrg 	      if (flag_checking && split_at_insn == BB_END (bb))
495310d565efSmrg 		{
495410d565efSmrg 		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
495510d565efSmrg 		  gcc_assert (e == NULL);
495610d565efSmrg 		}
495710d565efSmrg 
495810d565efSmrg 	      /* Note that the following may create a new basic block
495910d565efSmrg 		 and renumber the existing basic blocks.  */
496010d565efSmrg 	      if (split_at_insn != BB_END (bb))
496110d565efSmrg 		{
496210d565efSmrg 		  e = split_block (bb, split_at_insn);
496310d565efSmrg 		  if (e)
496410d565efSmrg 		    blocks_split++;
496510d565efSmrg 		}
496610d565efSmrg 
4967c7a68eb7Smrg 	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
4968c7a68eb7Smrg 	      ne->probability = profile_probability::guessed_never ();
496910d565efSmrg 	    }
497010d565efSmrg 
497110d565efSmrg 	  if (insn == BB_HEAD (bb))
497210d565efSmrg 	    break;
497310d565efSmrg 	}
497410d565efSmrg     }
497510d565efSmrg 
497610d565efSmrg   if (blocks_split)
497710d565efSmrg     verify_flow_info ();
497810d565efSmrg 
497910d565efSmrg   return blocks_split;
498010d565efSmrg }
498110d565efSmrg 
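/* Usage note, hedged: this routine is reached through the
   flow_call_edges_add hook (see rtl_cfg_hooks below) by the profiling
   machinery, which needs an edge to the exit block after every insn
   that might not return so that the spanning tree used for edge
   instrumentation stays correct.  Passing a null sbitmap processes
   the whole CFG:

     int split = rtl_flow_call_edges_add (NULL);

   The return value merely reports how many blocks had to be split.  */
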
498210d565efSmrg /* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
498310d565efSmrg    the conditional branch target, SECOND_HEAD should be the fall-thru;
498410d565efSmrg    there is no need to handle the fall-thru here, as the loop versioning
498510d565efSmrg    code handles it.  The reason for SECOND_HEAD is that it is needed for
498610d565efSmrg    the condition in trees, and this should be of the same type since it is a hook.  */
498710d565efSmrg static void
498810d565efSmrg rtl_lv_add_condition_to_bb (basic_block first_head ,
498910d565efSmrg 			    basic_block second_head ATTRIBUTE_UNUSED,
499010d565efSmrg 			    basic_block cond_bb, void *comp_rtx)
499110d565efSmrg {
499210d565efSmrg   rtx_code_label *label;
499310d565efSmrg   rtx_insn *seq, *jump;
499410d565efSmrg   rtx op0 = XEXP ((rtx)comp_rtx, 0);
499510d565efSmrg   rtx op1 = XEXP ((rtx)comp_rtx, 1);
499610d565efSmrg   enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
499710d565efSmrg   machine_mode mode;
499810d565efSmrg 
499910d565efSmrg 
500010d565efSmrg   label = block_label (first_head);
500110d565efSmrg   mode = GET_MODE (op0);
500210d565efSmrg   if (mode == VOIDmode)
500310d565efSmrg     mode = GET_MODE (op1);
500410d565efSmrg 
500510d565efSmrg   start_sequence ();
500610d565efSmrg   op0 = force_operand (op0, NULL_RTX);
500710d565efSmrg   op1 = force_operand (op1, NULL_RTX);
5008c7a68eb7Smrg   do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
5009c7a68eb7Smrg 			   profile_probability::uninitialized ());
501010d565efSmrg   jump = get_last_insn ();
501110d565efSmrg   JUMP_LABEL (jump) = label;
501210d565efSmrg   LABEL_NUSES (label)++;
501310d565efSmrg   seq = get_insns ();
501410d565efSmrg   end_sequence ();
501510d565efSmrg 
501610d565efSmrg   /* Add the new cond, in the new head.  */
501710d565efSmrg   emit_insn_after (seq, BB_END (cond_bb));
501810d565efSmrg }
501910d565efSmrg 
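/* The function above uses the standard RTL sequence idiom; a minimal
   hedged sketch of that idiom in isolation, with emit_move_insn
   standing in for arbitrary emission and TARGET, SOURCE and AFTER as
   placeholders:

     start_sequence ();
     emit_move_insn (target, source);
     rtx_insn *seq = get_insns ();
     end_sequence ();
     emit_insn_after (seq, after);

   Nothing is emitted into the main insn stream until the final
   emit_insn_after; the sequence is built on a side stream.  */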
502010d565efSmrg 
502110d565efSmrg /* Given a block B with a conditional branch at its end, store the
502210d565efSmrg    branch edge and the fall-thru edge in
502310d565efSmrg    BRANCH_EDGE and FALLTHRU_EDGE respectively.  */
502410d565efSmrg static void
502510d565efSmrg rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
502610d565efSmrg 			   edge *fallthru_edge)
502710d565efSmrg {
502810d565efSmrg   edge e = EDGE_SUCC (b, 0);
502910d565efSmrg 
503010d565efSmrg   if (e->flags & EDGE_FALLTHRU)
503110d565efSmrg     {
503210d565efSmrg       *fallthru_edge = e;
503310d565efSmrg       *branch_edge = EDGE_SUCC (b, 1);
503410d565efSmrg     }
503510d565efSmrg   else
503610d565efSmrg     {
503710d565efSmrg       *branch_edge = e;
503810d565efSmrg       *fallthru_edge = EDGE_SUCC (b, 1);
503910d565efSmrg     }
504010d565efSmrg }
504110d565efSmrg 
504210d565efSmrg void
504310d565efSmrg init_rtl_bb_info (basic_block bb)
504410d565efSmrg {
504510d565efSmrg   gcc_assert (!bb->il.x.rtl);
504610d565efSmrg   bb->il.x.head_ = NULL;
504710d565efSmrg   bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
504810d565efSmrg }
504910d565efSmrg 
505010d565efSmrg /* Returns true if it is possible to remove edge E by redirecting
505110d565efSmrg    it to the destination of the other edge from E->src.  */
505210d565efSmrg 
505310d565efSmrg static bool
505410d565efSmrg rtl_can_remove_branch_p (const_edge e)
505510d565efSmrg {
505610d565efSmrg   const_basic_block src = e->src;
505710d565efSmrg   const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
505810d565efSmrg   const rtx_insn *insn = BB_END (src);
505910d565efSmrg   rtx set;
506010d565efSmrg 
506110d565efSmrg   /* The conditions are taken from try_redirect_by_replacing_jump.  */
506210d565efSmrg   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
506310d565efSmrg     return false;
506410d565efSmrg 
506510d565efSmrg   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
506610d565efSmrg     return false;
506710d565efSmrg 
506810d565efSmrg   if (BB_PARTITION (src) != BB_PARTITION (target))
506910d565efSmrg     return false;
507010d565efSmrg 
507110d565efSmrg   if (!onlyjump_p (insn)
507210d565efSmrg       || tablejump_p (insn, NULL, NULL))
507310d565efSmrg     return false;
507410d565efSmrg 
507510d565efSmrg   set = single_set (insn);
507610d565efSmrg   if (!set || side_effects_p (set))
507710d565efSmrg     return false;
507810d565efSmrg 
507910d565efSmrg   return true;
508010d565efSmrg }
508110d565efSmrg 
508210d565efSmrg static basic_block
508321ff1670Smrg rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
508410d565efSmrg {
508521ff1670Smrg   bb = cfg_layout_duplicate_bb (bb, id);
508610d565efSmrg   bb->aux = NULL;
508710d565efSmrg   return bb;
508810d565efSmrg }
508910d565efSmrg 
509010d565efSmrg /* Do book-keeping of basic block BB for the profile consistency checker.
5091*0fc04c29Smrg    Store the counts in RECORD.  */
509210d565efSmrg static void
5093*0fc04c29Smrg rtl_account_profile_record (basic_block bb, struct profile_record *record)
509410d565efSmrg {
509510d565efSmrg   rtx_insn *insn;
509610d565efSmrg   FOR_BB_INSNS (bb, insn)
509710d565efSmrg     if (INSN_P (insn))
509810d565efSmrg       {
5099*0fc04c29Smrg 	record->size += insn_cost (insn, false);
5100c7a68eb7Smrg 	if (bb->count.initialized_p ())
5101*0fc04c29Smrg 	  record->time
5102c7a68eb7Smrg 	    += insn_cost (insn, true) * bb->count.to_gcov_type ();
510310d565efSmrg 	else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
5104*0fc04c29Smrg 	  record->time
5105c7a68eb7Smrg 	    += insn_cost (insn, true) * bb->count.to_frequency (cfun);
510610d565efSmrg       }
510710d565efSmrg }
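
/* Editor's note (not part of the original source): a worked example of
   the accounting above, with illustrative cost values.  For a block
   with a profile count of 1000 containing one insn whose size cost
   insn_cost (insn, false) is 4 and whose speed cost
   insn_cost (insn, true) is also 4, the loop adds 4 to RECORD->size
   and 4 * 1000 = 4000 to RECORD->time; when the count is
   uninitialized under a guessed profile, the estimated block
   frequency is used as the weight instead.  */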
510810d565efSmrg 
510910d565efSmrg /* Implementation of CFG manipulation for linearized RTL.  */
511010d565efSmrg struct cfg_hooks rtl_cfg_hooks = {
511110d565efSmrg   "rtl",
511210d565efSmrg   rtl_verify_flow_info,
511310d565efSmrg   rtl_dump_bb,
511410d565efSmrg   rtl_dump_bb_for_graph,
511510d565efSmrg   rtl_create_basic_block,
511610d565efSmrg   rtl_redirect_edge_and_branch,
511710d565efSmrg   rtl_redirect_edge_and_branch_force,
511810d565efSmrg   rtl_can_remove_branch_p,
511910d565efSmrg   rtl_delete_block,
512010d565efSmrg   rtl_split_block,
512110d565efSmrg   rtl_move_block_after,
512210d565efSmrg   rtl_can_merge_blocks,  /* can_merge_blocks_p */
512310d565efSmrg   rtl_merge_blocks,
512410d565efSmrg   rtl_predict_edge,
512510d565efSmrg   rtl_predicted_by_p,
512610d565efSmrg   cfg_layout_can_duplicate_bb_p,
512710d565efSmrg   rtl_duplicate_bb,
512810d565efSmrg   rtl_split_edge,
512910d565efSmrg   rtl_make_forwarder_block,
513010d565efSmrg   rtl_tidy_fallthru_edge,
513110d565efSmrg   rtl_force_nonfallthru,
513210d565efSmrg   rtl_block_ends_with_call_p,
513310d565efSmrg   rtl_block_ends_with_condjump_p,
513410d565efSmrg   rtl_flow_call_edges_add,
513510d565efSmrg   NULL, /* execute_on_growing_pred */
513610d565efSmrg   NULL, /* execute_on_shrinking_pred */
513710d565efSmrg   NULL, /* duplicate loop for trees */
513810d565efSmrg   NULL, /* lv_add_condition_to_bb */
513910d565efSmrg   NULL, /* lv_adjust_loop_header_phi */
514010d565efSmrg   NULL, /* extract_cond_bb_edges */
514110d565efSmrg   NULL, /* flush_pending_stmts */
514210d565efSmrg   rtl_block_empty_p, /* block_empty_p */
514310d565efSmrg   rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
514410d565efSmrg   rtl_account_profile_record,
514510d565efSmrg };
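
/* Editor's note (not part of the original source): passes select this
   table through the registration helper in cfghooks.c,

     rtl_register_cfg_hooks ();

   which points the global cfg_hooks at rtl_cfg_hooks so that the
   generic wrappers in cfghooks.h dispatch to the functions above.  */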
514610d565efSmrg 
514710d565efSmrg /* Implementation of CFG manipulation for cfg layout RTL, where
514810d565efSmrg    basic blocks connected via fallthru edges do not have to be
514910d565efSmrg    adjacent.  This representation will hopefully become the default
515010d565efSmrg    one in a future version of the compiler.  */
515110d565efSmrg 
515210d565efSmrg struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
515310d565efSmrg   "cfglayout mode",
515410d565efSmrg   rtl_verify_flow_info_1,
515510d565efSmrg   rtl_dump_bb,
515610d565efSmrg   rtl_dump_bb_for_graph,
515710d565efSmrg   cfg_layout_create_basic_block,
515810d565efSmrg   cfg_layout_redirect_edge_and_branch,
515910d565efSmrg   cfg_layout_redirect_edge_and_branch_force,
516010d565efSmrg   rtl_can_remove_branch_p,
516110d565efSmrg   cfg_layout_delete_block,
516210d565efSmrg   cfg_layout_split_block,
516310d565efSmrg   rtl_move_block_after,
516410d565efSmrg   cfg_layout_can_merge_blocks_p,
516510d565efSmrg   cfg_layout_merge_blocks,
516610d565efSmrg   rtl_predict_edge,
516710d565efSmrg   rtl_predicted_by_p,
516810d565efSmrg   cfg_layout_can_duplicate_bb_p,
516910d565efSmrg   cfg_layout_duplicate_bb,
517010d565efSmrg   cfg_layout_split_edge,
517110d565efSmrg   rtl_make_forwarder_block,
517210d565efSmrg   NULL, /* tidy_fallthru_edge */
517310d565efSmrg   rtl_force_nonfallthru,
517410d565efSmrg   rtl_block_ends_with_call_p,
517510d565efSmrg   rtl_block_ends_with_condjump_p,
517610d565efSmrg   rtl_flow_call_edges_add,
517710d565efSmrg   NULL, /* execute_on_growing_pred */
517810d565efSmrg   NULL, /* execute_on_shrinking_pred */
517910d565efSmrg   duplicate_loop_to_header_edge, /* duplicate loop for trees */
518010d565efSmrg   rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
518110d565efSmrg   NULL, /* lv_adjust_loop_header_phi */
518210d565efSmrg   rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
518310d565efSmrg   NULL, /* flush_pending_stmts */
518410d565efSmrg   rtl_block_empty_p, /* block_empty_p */
518510d565efSmrg   rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
518610d565efSmrg   rtl_account_profile_record,
518710d565efSmrg };
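
/* Editor's note (not part of the original source): the matching
   registration helper is cfg_layout_rtl_register_cfg_hooks in
   cfghooks.c.  Passes usually enter and leave cfglayout mode through
   the pair defined earlier in this file, roughly:

     cfg_layout_initialize (0);
     ... transform the insn chain free of layout constraints ...
     cfg_layout_finalize ();

   which install and uninstall this hook table, respectively.  */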
518810d565efSmrg 
518910d565efSmrg #include "gt-cfgrtl.h"
5190