xref: /netbsd/external/gpl3/gcc.old/dist/gcc/cfgrtl.c (revision ec02198a)
110d565efSmrg /* Control flow graph manipulation code for GNU compiler.
2*ec02198aSmrg    Copyright (C) 1987-2020 Free Software Foundation, Inc.
310d565efSmrg 
410d565efSmrg This file is part of GCC.
510d565efSmrg 
610d565efSmrg GCC is free software; you can redistribute it and/or modify it under
710d565efSmrg the terms of the GNU General Public License as published by the Free
810d565efSmrg Software Foundation; either version 3, or (at your option) any later
910d565efSmrg version.
1010d565efSmrg 
1110d565efSmrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
1210d565efSmrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
1310d565efSmrg FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
1410d565efSmrg for more details.
1510d565efSmrg 
1610d565efSmrg You should have received a copy of the GNU General Public License
1710d565efSmrg along with GCC; see the file COPYING3.  If not see
1810d565efSmrg <http://www.gnu.org/licenses/>.  */
1910d565efSmrg 
2010d565efSmrg /* This file contains low level functions to manipulate the CFG and analyze it
2110d565efSmrg    that are aware of the RTL intermediate language.
2210d565efSmrg 
2310d565efSmrg    Available functionality:
2410d565efSmrg      - Basic CFG/RTL manipulation API documented in cfghooks.h
2510d565efSmrg      - CFG-aware instruction chain manipulation
2610d565efSmrg 	 delete_insn, delete_insn_chain
2710d565efSmrg      - Edge splitting and committing to edges
2810d565efSmrg 	 insert_insn_on_edge, commit_edge_insertions
2910d565efSmrg      - CFG updating after insn simplification
3010d565efSmrg 	 purge_dead_edges, purge_all_dead_edges
3110d565efSmrg      - CFG fixing after coarse manipulation
3210d565efSmrg 	fixup_abnormal_edges
3310d565efSmrg 
3410d565efSmrg    Functions not supposed for generic use:
3510d565efSmrg      - Infrastructure to determine quickly basic block for insn
3610d565efSmrg 	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
3710d565efSmrg      - Edge redirection with updating and optimizing of insn chain
3810d565efSmrg 	 block_label, tidy_fallthru_edge, force_nonfallthru  */
3910d565efSmrg 
4010d565efSmrg #include "config.h"
4110d565efSmrg #include "system.h"
4210d565efSmrg #include "coretypes.h"
4310d565efSmrg #include "backend.h"
4410d565efSmrg #include "target.h"
4510d565efSmrg #include "rtl.h"
4610d565efSmrg #include "tree.h"
4710d565efSmrg #include "cfghooks.h"
4810d565efSmrg #include "df.h"
4910d565efSmrg #include "insn-config.h"
5010d565efSmrg #include "memmodel.h"
5110d565efSmrg #include "emit-rtl.h"
5210d565efSmrg #include "cfgrtl.h"
5310d565efSmrg #include "cfganal.h"
5410d565efSmrg #include "cfgbuild.h"
5510d565efSmrg #include "cfgcleanup.h"
5610d565efSmrg #include "bb-reorder.h"
5710d565efSmrg #include "rtl-error.h"
5810d565efSmrg #include "insn-attr.h"
5910d565efSmrg #include "dojump.h"
6010d565efSmrg #include "expr.h"
6110d565efSmrg #include "cfgloop.h"
6210d565efSmrg #include "tree-pass.h"
6310d565efSmrg #include "print-rtl.h"
6410d565efSmrg 
65*ec02198aSmrg /* Disable warnings about missing quoting in GCC diagnostics.  */
66*ec02198aSmrg #if __GNUC__ >= 10
67*ec02198aSmrg #  pragma GCC diagnostic push
68*ec02198aSmrg #  pragma GCC diagnostic ignored "-Wformat-diag"
69*ec02198aSmrg #endif
70*ec02198aSmrg 
7110d565efSmrg /* Holds the interesting leading and trailing notes for the function.
7210d565efSmrg    Only applicable if the CFG is in cfglayout mode.  */
7310d565efSmrg static GTY(()) rtx_insn *cfg_layout_function_footer;
7410d565efSmrg static GTY(()) rtx_insn *cfg_layout_function_header;
7510d565efSmrg 
7610d565efSmrg static rtx_insn *skip_insns_after_block (basic_block);
7710d565efSmrg static void record_effective_endpoints (void);
7810d565efSmrg static void fixup_reorder_chain (void);
7910d565efSmrg 
8010d565efSmrg void verify_insn_chain (void);
8110d565efSmrg static void fixup_fallthru_exit_predecessor (void);
8210d565efSmrg static int can_delete_note_p (const rtx_note *);
8310d565efSmrg static int can_delete_label_p (const rtx_code_label *);
8410d565efSmrg static basic_block rtl_split_edge (edge);
8510d565efSmrg static bool rtl_move_block_after (basic_block, basic_block);
8610d565efSmrg static int rtl_verify_flow_info (void);
8710d565efSmrg static basic_block cfg_layout_split_block (basic_block, void *);
8810d565efSmrg static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
8910d565efSmrg static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
9010d565efSmrg static void cfg_layout_delete_block (basic_block);
9110d565efSmrg static void rtl_delete_block (basic_block);
9210d565efSmrg static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
9310d565efSmrg static edge rtl_redirect_edge_and_branch (edge, basic_block);
9410d565efSmrg static basic_block rtl_split_block (basic_block, void *);
95c7a68eb7Smrg static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
9610d565efSmrg static int rtl_verify_flow_info_1 (void);
9710d565efSmrg static void rtl_make_forwarder_block (edge);
9810d565efSmrg 
9910d565efSmrg /* Return true if NOTE is not one of the ones that must be kept paired,
10010d565efSmrg    so that we may simply delete it.  */
10110d565efSmrg 
10210d565efSmrg static int
can_delete_note_p(const rtx_note * note)10310d565efSmrg can_delete_note_p (const rtx_note *note)
10410d565efSmrg {
10510d565efSmrg   switch (NOTE_KIND (note))
10610d565efSmrg     {
10710d565efSmrg     case NOTE_INSN_DELETED:
10810d565efSmrg     case NOTE_INSN_BASIC_BLOCK:
10910d565efSmrg     case NOTE_INSN_EPILOGUE_BEG:
11010d565efSmrg       return true;
11110d565efSmrg 
11210d565efSmrg     default:
11310d565efSmrg       return false;
11410d565efSmrg     }
11510d565efSmrg }
11610d565efSmrg 
/* True if a given label can be deleted.  A label is deletable only when
   it is not marked preserved, has no user-visible name, and is not in
   the forced_labels list (labels whose address was taken).  */

static int
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  /* Labels in forced_labels have their address taken and so must
	     survive even if otherwise unused.  */
	  && !vec_safe_contains<rtx_insn *> (forced_labels,
					     const_cast<rtx_code_label *> (label)));
}
12810d565efSmrg 
/* Delete INSN by patching it out of the insn chain.

   Labels that cannot be deleted are demoted to NOTE_INSN_DELETED_LABEL
   notes instead of being removed.  Use counts of any labels referenced
   by INSN (as jump target, REG_LABEL_TARGET or REG_LABEL_OPERAND notes,
   or jump-table entries) are decremented.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  /* Keep the rtx in the chain, but re-purpose it as a note.  */
	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  /* For a jump table, drop the use count of every label it targets.  */
  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
22410d565efSmrg 
/* Like delete_insn but also purge dead edges from BB.
   Return true if any edges are eliminated.  */

bool
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

  if (NONDEBUG_INSN_P (insn) && BLOCK_FOR_INSN (insn))
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      /* Purge when INSN ends its block, or when only debug insns
	 separate it from the block's end (so in a non-debug compilation
	 it would have been the last insn, see PR-style -fcompare-debug
	 concerns elsewhere in this file).  */
      if (BB_END (bb) == insn)
	purge = true;
      else if (DEBUG_INSN_P (BB_END (bb)))
	for (rtx_insn *dinsn = NEXT_INSN (insn);
	     DEBUG_INSN_P (dinsn); dinsn = NEXT_INSN (dinsn))
	  if (BB_END (bb) == dinsn)
	    {
	      purge = true;
	      break;
	    }
    }
  /* NOTE: BLOCK_FOR_INSN (insn) is still consulted after deletion below;
     delete_insn leaves the field readable on the deleted insn.  */
  delete_insn (insn);
  if (purge)
    return purge_dead_edges (BLOCK_FOR_INSN (insn));
  return false;
}
25210d565efSmrg 
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set bb field for
   insns that cannot be removed to NULL.  */

void
delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
{
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  Walk backwards from FINISH so that PREV_INSN links stay
     valid as insns are removed.  */
  rtx_insn *current = finish;
  while (1)
    {
      rtx_insn *prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
	;  /* Paired note: must be kept in the chain.  */
      else
	delete_insn (current);

      /* Surviving insns (kept notes, demoted labels) may need their
	 basic-block field cleared.  */
      if (clear_bb && !current->deleted ())
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}
28010d565efSmrg 
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      /* The bb note must come right after the label (if any), otherwise
	 right at the head of the block.  */
      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* The bb note goes after the label.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No label: the bb note becomes the new head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
36110d565efSmrg 
/* Create new basic block consisting of instructions in between HEAD and END
   and place it to the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create basic block at the end of INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  Grows by ~25% plus a small
     constant to amortize repeated insertions.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  /* Clear the "already used" tag set by create_basic_block_structure.  */
  bb->aux = NULL;
  return bb;
}
39010d565efSmrg 
39110d565efSmrg static basic_block
cfg_layout_create_basic_block(void * head,void * end,basic_block after)39210d565efSmrg cfg_layout_create_basic_block (void *head, void *end, basic_block after)
39310d565efSmrg {
39410d565efSmrg   basic_block newbb = rtl_create_basic_block (head, end, after);
39510d565efSmrg 
39610d565efSmrg   return newbb;
39710d565efSmrg }
39810d565efSmrg 
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  /* get_last_bb_insn includes any trailing table/barrier in the range.  */
  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);


  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}
42810d565efSmrg 
42910d565efSmrg /* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */
43010d565efSmrg 
43110d565efSmrg void
compute_bb_for_insn(void)43210d565efSmrg compute_bb_for_insn (void)
43310d565efSmrg {
43410d565efSmrg   basic_block bb;
43510d565efSmrg 
43610d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
43710d565efSmrg     {
43810d565efSmrg       rtx_insn *end = BB_END (bb);
43910d565efSmrg       rtx_insn *insn;
44010d565efSmrg 
44110d565efSmrg       for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
44210d565efSmrg 	{
44310d565efSmrg 	  BLOCK_FOR_INSN (insn) = bb;
44410d565efSmrg 	  if (insn == end)
44510d565efSmrg 	    break;
44610d565efSmrg 	}
44710d565efSmrg     }
44810d565efSmrg }
44910d565efSmrg 
45010d565efSmrg /* Release the basic_block_for_insn array.  */
45110d565efSmrg 
45210d565efSmrg unsigned int
free_bb_for_insn(void)45310d565efSmrg free_bb_for_insn (void)
45410d565efSmrg {
45510d565efSmrg   rtx_insn *insn;
45610d565efSmrg   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
45710d565efSmrg     if (!BARRIER_P (insn))
45810d565efSmrg       BLOCK_FOR_INSN (insn) = NULL;
45910d565efSmrg   return 0;
46010d565efSmrg }
46110d565efSmrg 
namespace {

/* Descriptor for the *free_cfg pass, which tears down the CFG
   (PROP_cfg is listed in properties_destroyed).  */
const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  /* Record section boundaries before the CFG (and with it partition
     information) goes away.  */
  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace
50810d565efSmrg 
50910d565efSmrg rtl_opt_pass *
make_pass_free_cfg(gcc::context * ctxt)51010d565efSmrg make_pass_free_cfg (gcc::context *ctxt)
51110d565efSmrg {
51210d565efSmrg   return new pass_free_cfg (ctxt);
51310d565efSmrg }
51410d565efSmrg 
51510d565efSmrg /* Return RTX to emit after when we want to emit code on the entry of function.  */
51610d565efSmrg rtx_insn *
entry_of_function(void)51710d565efSmrg entry_of_function (void)
51810d565efSmrg {
51910d565efSmrg   return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
52010d565efSmrg 	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
52110d565efSmrg }
52210d565efSmrg 
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  The insn is queued on the entry block's
   fallthru edge and committed immediately.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  /* The entry block's single successor edge must be a fallthru.  */
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
53510d565efSmrg 
/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  /* Advance END once so the loop below covers END itself.  */
  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}
55110d565efSmrg 
55210d565efSmrg /* Update BLOCK_FOR_INSN of insns in BB to BB,
55310d565efSmrg    and notify df of the change.  */
55410d565efSmrg 
55510d565efSmrg void
update_bb_for_insn(basic_block bb)55610d565efSmrg update_bb_for_insn (basic_block bb)
55710d565efSmrg {
55810d565efSmrg   update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
55910d565efSmrg }
56010d565efSmrg 
56110d565efSmrg 
/* Like active_insn_p, except keep the return value use or clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.
     Similarly, keep a USE of the function return value, otherwise
     the USE is dropped and we could fail to thread jump if USE
     appears on some paths and not on others, see PR90257.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || GET_CODE (PATTERN (insn)) == USE)
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}
58710d565efSmrg 
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  /* Entry/exit blocks, multi-successor blocks, and blocks whose single
     successor edge is fake can never be pure forwarders.  */
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  /* Every insn before the last must be inactive.  */
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  /* The last insn may be a simple (unconditional) jump or inactive.  */
  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
61010d565efSmrg 
/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      /* BB must not itself be a loop header ...  */
      if (bb->loop_father->header == bb)
	return false;
      /* ... nor forward into one (that would make BB a preheader
	 or latch).  */
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

  return true;
}
63310d565efSmrg 
/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  /* Falling through requires TARGET to immediately follow SRC.  */
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  /* An existing fallthru edge to the exit block precludes another one.  */
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  /* Finally verify no active insn intervenes between SRC's end and
     TARGET's first active insn.  */
  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}
66510d565efSmrg 
66610d565efSmrg /* Return nonzero if we could reach target from src by falling through,
66710d565efSmrg    if the target was made adjacent.  If we already have a fall-through
66810d565efSmrg    edge to the exit block, we can't do that.  */
66910d565efSmrg static bool
could_fall_through(basic_block src,basic_block target)67010d565efSmrg could_fall_through (basic_block src, basic_block target)
67110d565efSmrg {
67210d565efSmrg   edge e;
67310d565efSmrg   edge_iterator ei;
67410d565efSmrg 
67510d565efSmrg   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
67610d565efSmrg     return true;
67710d565efSmrg   FOR_EACH_EDGE (e, ei, src->succs)
67810d565efSmrg     if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
67910d565efSmrg 	&& e->flags & EDGE_FALLTHRU)
68010d565efSmrg       return 0;
68110d565efSmrg   return true;
68210d565efSmrg }
68310d565efSmrg 
/* Return the NOTE_INSN_BASIC_BLOCK of BB.  The note is either the
   block's head or immediately follows the block's leading label;
   asserts if neither holds.  */
rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  /* Skip a leading CODE_LABEL, if present.  */
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}
69710d565efSmrg 
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  Returns NULL if the block has no
   head insn.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  /* Skip a leading label; the bb note must come next.  */
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}
71710d565efSmrg 
/* Creates a new basic block just after basic block BB by splitting
   everything after specified instruction INSNP.  Returns the new block.
   If INSNP is NULL, the split point is chosen so that the whole body of
   BB (everything after the basic block note) moves to the new block.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      /* No explicit split point: split right after the basic block
	 note, i.e. before the first "real" insn of BB.  */
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    /* Splitting at the very end would leave the new block without any
       insns; emit a placeholder note so it has a head.  */
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  The new block inherits all of BB's
     successors; BB itself keeps none (the caller/hooks add the
     fallthru edge BB -> NEW_BB).  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
77910d565efSmrg 
78010d565efSmrg /* Return true if the single edge between blocks A and B is the only place
78110d565efSmrg    in RTL which holds some unique locus.  */
78210d565efSmrg 
78310d565efSmrg static bool
unique_locus_on_edge_between_p(basic_block a,basic_block b)78410d565efSmrg unique_locus_on_edge_between_p (basic_block a, basic_block b)
78510d565efSmrg {
78610d565efSmrg   const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
78710d565efSmrg   rtx_insn *insn, *end;
78810d565efSmrg 
78910d565efSmrg   if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
79010d565efSmrg     return false;
79110d565efSmrg 
79210d565efSmrg   /* First scan block A backward.  */
79310d565efSmrg   insn = BB_END (a);
79410d565efSmrg   end = PREV_INSN (BB_HEAD (a));
79510d565efSmrg   while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
79610d565efSmrg     insn = PREV_INSN (insn);
79710d565efSmrg 
79810d565efSmrg   if (insn != end && INSN_LOCATION (insn) == goto_locus)
79910d565efSmrg     return false;
80010d565efSmrg 
80110d565efSmrg   /* Then scan block B forward.  */
80210d565efSmrg   insn = BB_HEAD (b);
80310d565efSmrg   if (insn)
80410d565efSmrg     {
80510d565efSmrg       end = NEXT_INSN (BB_END (b));
80610d565efSmrg       while (insn != end && !NONDEBUG_INSN_P (insn))
80710d565efSmrg 	insn = NEXT_INSN (insn);
80810d565efSmrg 
80910d565efSmrg       if (insn != end && INSN_HAS_LOCATION (insn)
81010d565efSmrg 	  && INSN_LOCATION (insn) == goto_locus)
81110d565efSmrg 	return false;
81210d565efSmrg     }
81310d565efSmrg 
81410d565efSmrg   return true;
81510d565efSmrg }
81610d565efSmrg 
81710d565efSmrg /* If the single edge between blocks A and B is the only place in RTL which
81810d565efSmrg    holds some unique locus, emit a nop with that locus between the blocks.  */
81910d565efSmrg 
82010d565efSmrg static void
emit_nop_for_unique_locus_between(basic_block a,basic_block b)82110d565efSmrg emit_nop_for_unique_locus_between (basic_block a, basic_block b)
82210d565efSmrg {
82310d565efSmrg   if (!unique_locus_on_edge_between_p (a, b))
82410d565efSmrg     return;
82510d565efSmrg 
82610d565efSmrg   BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
82710d565efSmrg   INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
82810d565efSmrg }
82910d565efSmrg 
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  B's label, basic block note, and A's
   trailing jump (if any) are deleted; B's remaining insns are
   reassociated with A, and B is removed from the dataflow info.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Back B_END up over any trailing run of debug insns;
     [b_debug_start, b_debug_end] delimits that run afterwards.  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      /* Find the insn immediately preceding the jump, skipping any
	 intervening notes (but not the basic block note or A's head).  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && only_sets_cc0_p (prev))
	{
	  rtx_insn *tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* Propagate the A->B edge's locus onto B's (now sole surviving)
     outgoing edge, per forward_edge_locus above.  */
  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
95310d565efSmrg 
95410d565efSmrg 
95510d565efSmrg /* Return true when block A and B can be merged.  */
95610d565efSmrg 
95710d565efSmrg static bool
rtl_can_merge_blocks(basic_block a,basic_block b)95810d565efSmrg rtl_can_merge_blocks (basic_block a, basic_block b)
95910d565efSmrg {
96010d565efSmrg   /* If we are partitioning hot/cold basic blocks, we don't want to
96110d565efSmrg      mess up unconditional or indirect jumps that cross between hot
96210d565efSmrg      and cold sections.
96310d565efSmrg 
96410d565efSmrg      Basic block partitioning may result in some jumps that appear to
96510d565efSmrg      be optimizable (or blocks that appear to be mergeable), but which really
96610d565efSmrg      must be left untouched (they are required to make it safely across
96710d565efSmrg      partition boundaries).  See  the comments at the top of
96810d565efSmrg      bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
96910d565efSmrg 
97010d565efSmrg   if (BB_PARTITION (a) != BB_PARTITION (b))
97110d565efSmrg     return false;
97210d565efSmrg 
97310d565efSmrg   /* Protect the loop latches.  */
97410d565efSmrg   if (current_loops && b->loop_father->latch == b)
97510d565efSmrg     return false;
97610d565efSmrg 
97710d565efSmrg   /* There must be exactly one edge in between the blocks.  */
97810d565efSmrg   return (single_succ_p (a)
97910d565efSmrg 	  && single_succ (a) == b
98010d565efSmrg 	  && single_pred_p (b)
98110d565efSmrg 	  && a != b
98210d565efSmrg 	  /* Must be simple edge.  */
98310d565efSmrg 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
98410d565efSmrg 	  && a->next_bb == b
98510d565efSmrg 	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
98610d565efSmrg 	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
98710d565efSmrg 	  /* If the jump insn has side effects,
98810d565efSmrg 	     we can't kill the edge.  */
98910d565efSmrg 	  && (!JUMP_P (BB_END (a))
99010d565efSmrg 	      || (reload_completed
99110d565efSmrg 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
99210d565efSmrg }
99310d565efSmrg 
99410d565efSmrg /* Return the label in the head of basic block BLOCK.  Create one if it doesn't
99510d565efSmrg    exist.  */
99610d565efSmrg 
99710d565efSmrg rtx_code_label *
block_label(basic_block block)99810d565efSmrg block_label (basic_block block)
99910d565efSmrg {
100010d565efSmrg   if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
100110d565efSmrg     return NULL;
100210d565efSmrg 
100310d565efSmrg   if (!LABEL_P (BB_HEAD (block)))
100410d565efSmrg     {
100510d565efSmrg       BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
100610d565efSmrg     }
100710d565efSmrg 
100810d565efSmrg   return as_a <rtx_code_label *> (BB_HEAD (block));
100910d565efSmrg }
101010d565efSmrg 
1011c7a68eb7Smrg /* Remove all barriers from BB_FOOTER of a BB.  */
1012c7a68eb7Smrg 
1013c7a68eb7Smrg static void
remove_barriers_from_footer(basic_block bb)1014c7a68eb7Smrg remove_barriers_from_footer (basic_block bb)
1015c7a68eb7Smrg {
1016c7a68eb7Smrg   rtx_insn *insn = BB_FOOTER (bb);
1017c7a68eb7Smrg 
1018c7a68eb7Smrg   /* Remove barriers but keep jumptables.  */
1019c7a68eb7Smrg   while (insn)
1020c7a68eb7Smrg     {
1021c7a68eb7Smrg       if (BARRIER_P (insn))
1022c7a68eb7Smrg 	{
1023c7a68eb7Smrg 	  if (PREV_INSN (insn))
1024c7a68eb7Smrg 	    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1025c7a68eb7Smrg 	  else
1026c7a68eb7Smrg 	    BB_FOOTER (bb) = NEXT_INSN (insn);
1027c7a68eb7Smrg 	  if (NEXT_INSN (insn))
1028c7a68eb7Smrg 	    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1029c7a68eb7Smrg 	}
1030c7a68eb7Smrg       if (LABEL_P (insn))
1031c7a68eb7Smrg 	return;
1032c7a68eb7Smrg       insn = NEXT_INSN (insn);
1033c7a68eb7Smrg     }
1034c7a68eb7Smrg }
1035c7a68eb7Smrg 
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  /* The jump must be removable: only a jump, with no other effects.  */
  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);

  /* See if we can create the fallthru edge.  If so, the jump becomes
     redundant and is simply deleted.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  delete_insn_chain (kill_from, BB_END (src), false);
	  remove_barriers_from_footer (src);
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      /* Emit the new unconditional jump after the old one, then delete
	 the old (possibly complex) jump and its cc0 setter.  */
      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      /* An unconditional jump must be followed by a barrier; reuse an
	 existing one or emit a fresh one.  */
      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
				        PREV_INSN (barrier), src);

	      /* Unlink the jump from its current position ...  */
	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      /* ... and relink it immediately before the barrier.  */
	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  /* The surviving edge is now unconditional.  */
  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
119510d565efSmrg 
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.  Handles tablejumps, asm-goto, and plain jumps.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      /* Rewrite every table entry referring to OLD_LABEL, keeping the
	 label reference counts in sync.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = tablejump_casesi_pattern (insn)) != NULL_RTX
	  && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      /* An asm goto: rewrite matching labels in its label vector and
	 fix up JUMP_LABEL / REG_LABEL_TARGET notes accordingly.  */
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  /* The new label is JUMP_LABEL, so it must not also appear as
	     a REG_LABEL_TARGET note.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      /* Retarget any REG_LABEL_OPERAND notes that still mention the
	 old label.  */
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  Or for crossing
	     jumps.  */
	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			      block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
			  || CROSSING_JUMP_P (insn));
	      return false;
	    }
	}
    }
  return true;
}
130810d565efSmrg 
130910d565efSmrg 
/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure.  E is the edge to redirect and TARGET its new
   destination; on success the (possibly replaced) edge is returned.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      /* The normal case: patch the single jump ending SRC.  */
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
					     old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  /* Update the CFG to match the patched insns.  */
  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
134810d565efSmrg 
/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  Sets or clears EDGE_CROSSING and CROSSING_JUMP_P to match
   the partitions of E's endpoints.  */

static void
fixup_partition_crossing (edge e)
{
  /* Edges from the entry or to the exit block never cross partitions.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
         src if it exists, and if no other successors are
         still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
        {
          bool has_crossing_succ = false;
          edge e2;
          edge_iterator ei;
          FOR_EACH_EDGE (e2, ei, e->src->succs)
            {
              has_crossing_succ |= (e2->flags & EDGE_CROSSING);
              if (has_crossing_succ)
                break;
            }
          if (!has_crossing_succ)
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
        }
    }
}
139110d565efSmrg 
/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold. Therefore,
     its preds will no longer be region crossing. Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region. Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
         Note that force_nonfallthru will do any necessary partition
         boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
          && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        force_nonfallthru (e);
      else
        fixup_partition_crossing (e);
    }
}
142810d565efSmrg 
142910d565efSmrg /* Attempt to change code to redirect edge E to TARGET.  Don't do that on
143010d565efSmrg    expense of adding new instructions or reordering basic blocks.
143110d565efSmrg 
143210d565efSmrg    Function can be also called with edge destination equivalent to the TARGET.
143310d565efSmrg    Then it should try the simplifications and do nothing if none is possible.
143410d565efSmrg 
143510d565efSmrg    Return edge representing the branch if transformation succeeded.  Return NULL
143610d565efSmrg    on failure.
143710d565efSmrg    We still return NULL in case E already destinated TARGET and we didn't
143810d565efSmrg    managed to simplify instruction stream.  */
143910d565efSmrg 
144010d565efSmrg static edge
rtl_redirect_edge_and_branch(edge e,basic_block target)144110d565efSmrg rtl_redirect_edge_and_branch (edge e, basic_block target)
144210d565efSmrg {
144310d565efSmrg   edge ret;
144410d565efSmrg   basic_block src = e->src;
144510d565efSmrg   basic_block dest = e->dest;
144610d565efSmrg 
144710d565efSmrg   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
144810d565efSmrg     return NULL;
144910d565efSmrg 
145010d565efSmrg   if (dest == target)
145110d565efSmrg     return e;
145210d565efSmrg 
145310d565efSmrg   if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
145410d565efSmrg     {
145510d565efSmrg       df_set_bb_dirty (src);
145610d565efSmrg       fixup_partition_crossing (ret);
145710d565efSmrg       return ret;
145810d565efSmrg     }
145910d565efSmrg 
146010d565efSmrg   ret = redirect_branch_edge (e, target);
146110d565efSmrg   if (!ret)
146210d565efSmrg     return NULL;
146310d565efSmrg 
146410d565efSmrg   df_set_bb_dirty (src);
146510d565efSmrg   fixup_partition_crossing (ret);
146610d565efSmrg   return ret;
146710d565efSmrg }
146810d565efSmrg 
146910d565efSmrg /* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */
147010d565efSmrg 
147110d565efSmrg void
emit_barrier_after_bb(basic_block bb)147210d565efSmrg emit_barrier_after_bb (basic_block bb)
147310d565efSmrg {
147410d565efSmrg   rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
147510d565efSmrg   gcc_assert (current_ir_type () == IR_RTL_CFGRTL
147610d565efSmrg               || current_ir_type () == IR_RTL_CFGLAYOUT);
147710d565efSmrg   if (current_ir_type () == IR_RTL_CFGLAYOUT)
147810d565efSmrg     {
147910d565efSmrg       rtx_insn *insn = unlink_insn_chain (barrier, barrier);
148010d565efSmrg 
148110d565efSmrg       if (BB_FOOTER (bb))
148210d565efSmrg 	{
148310d565efSmrg           rtx_insn *footer_tail = BB_FOOTER (bb);
148410d565efSmrg 
148510d565efSmrg           while (NEXT_INSN (footer_tail))
148610d565efSmrg             footer_tail = NEXT_INSN (footer_tail);
148710d565efSmrg           if (!BARRIER_P (footer_tail))
148810d565efSmrg             {
148910d565efSmrg               SET_NEXT_INSN (footer_tail) = insn;
149010d565efSmrg               SET_PREV_INSN (insn) = footer_tail;
149110d565efSmrg             }
149210d565efSmrg 	}
149310d565efSmrg       else
149410d565efSmrg         BB_FOOTER (bb) = insn;
149510d565efSmrg     }
149610d565efSmrg }
149710d565efSmrg 
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.

   Returns the newly created basic block holding the jump, or NULL when
   the jump could be emitted directly into E->src.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;		/* Shadows the function-scope NOTE.  */
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
				  block_label (target), 0);
      gcc_assert (redirected);

      /* Move the branch probability from the REG_BR_PROB note onto the
	 new branch edge B.  */
      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = profile_probability::from_reg_br_prob_note (prob);
	  /* NOTE(review): this always zeroes E->probability; one might
	     expect the remaining fallthru edge to keep
	     1 - B->probability instead — confirm against upstream
	     history before changing.  */
	  e->probability -= e->probability;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      /* Remember the abnormal flags and strip E down to just its
	 fallthru bit; the abnormal edge is re-created at the end.  */
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

	  /* Make sure new block ends up in correct hot/cold section.  */
	  BB_COPY_PARTITION (bb, e->dest);

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  /* Retarget any asm label that pointed at the old fallthru
	     destination, keeping LABEL_NUSES counts accurate.  */
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  /* Keep JUMP_LABEL and the REG_LABEL_TARGET / REG_LABEL_OPERAND
	     notes consistent with the retargeted asm labels.  */
	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  /* A separate jump block is needed when E->src has other successors,
     when an abnormal edge was stripped above, or when an asm goto also
     branches to TARGET.  */
  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      profile_count count = e->count ();
      profile_probability probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = profile_probability::always ();

      /* If e->src was previously region crossing, it no longer is
         and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  /* Split the original probability evenly between falling into
	     JUMP_BLOCK and branching straight to TARGET.  */
	  new_edge->probability = new_edge->probability.apply_scale (1, 2);
	  jump_block->count = jump_block->count.apply_scale (1, 2);
	  edge new_edge2 = make_edge (new_edge->src, target,
				      e->flags & ~EDGE_FALLTHRU);
	  new_edge2->probability = probability - new_edge->probability;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  /* Emit the actual jump (or return jump) at the end of JUMP_BLOCK.  */
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (jump_label == ret_rtx)
	emit_jump_insn_after_setloc (targetm.gen_return (),
				     BB_END (jump_block), loc);
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
				       BB_END (jump_block), loc);
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx_code_label *label = block_label (target);
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
				   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
173310d565efSmrg 
173410d565efSmrg /* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
173510d565efSmrg    (and possibly create new basic block) to make edge non-fallthru.
173610d565efSmrg    Return newly created BB or NULL if none.  */
173710d565efSmrg 
173810d565efSmrg static basic_block
rtl_force_nonfallthru(edge e)173910d565efSmrg rtl_force_nonfallthru (edge e)
174010d565efSmrg {
174110d565efSmrg   return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
174210d565efSmrg }
174310d565efSmrg 
174410d565efSmrg /* Redirect edge even at the expense of creating new jump insn or
174510d565efSmrg    basic block.  Return new basic block if created, NULL otherwise.
174610d565efSmrg    Conversion must be possible.  */
174710d565efSmrg 
174810d565efSmrg static basic_block
rtl_redirect_edge_and_branch_force(edge e,basic_block target)174910d565efSmrg rtl_redirect_edge_and_branch_force (edge e, basic_block target)
175010d565efSmrg {
175110d565efSmrg   if (redirect_edge_and_branch (e, target)
175210d565efSmrg       || e->dest == target)
175310d565efSmrg     return NULL;
175410d565efSmrg 
175510d565efSmrg   /* In case the edge redirection failed, try to force it to be non-fallthru
175610d565efSmrg      and redirect newly created simplejump.  */
175710d565efSmrg   df_set_bb_dirty (e->src);
175810d565efSmrg   return force_nonfallthru_and_redirect (e, target, NULL_RTX);
175910d565efSmrg }
176010d565efSmrg 
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.
   On success, E is marked EDGE_FALLTHRU; otherwise nothing changes.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (NONDEBUG_INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx_insn *label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  reorder_insns (label, label, PREV_INSN (q));
	  delete_insn (table);
	}

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);

      /* Step back over the jump itself so the chain deletion below
	 removes it too.  */
      q = PREV_INSN (q);
    }
  /* Unconditional jumps with side-effects (i.e. which we can't just delete
     together with the barrier) should never have a fallthru edge.  */
  else if (JUMP_P (q) && any_uncondjump_p (q))
    return;

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
182810d565efSmrg 
/* Should move basic block BB after basic block AFTER.  NIY
   (not implemented yet): always reports failure, so callers must
   handle the FALSE return.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}
183710d565efSmrg 
183810d565efSmrg /* Locate the last bb in the same partition as START_BB.  */
183910d565efSmrg 
184010d565efSmrg static basic_block
last_bb_in_partition(basic_block start_bb)184110d565efSmrg last_bb_in_partition (basic_block start_bb)
184210d565efSmrg {
184310d565efSmrg   basic_block bb;
184410d565efSmrg   FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
184510d565efSmrg     {
184610d565efSmrg       if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
184710d565efSmrg         return bb;
184810d565efSmrg     }
184910d565efSmrg   /* Return bb before the exit block.  */
185010d565efSmrg   return bb->prev_bb;
185110d565efSmrg }
185210d565efSmrg 
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
          BB_COPY_PARTITION (bb, edge_in->dest);
        }
      else
        {
          basic_block after = edge_in->dest->prev_bb;
          /* If this is post-bb reordering, and the edge crosses a partition
             boundary, the new block needs to be inserted in the bb chain
             at the end of the src partition (since we put the new bb into
             that partition, see below). Otherwise we may end up creating
             an extra partition crossing in the chain, which is illegal.
             It can't go after the src, because src may have a fall-through
             to a different block.  */
          if (crtl->bb_reorder_complete
              && (edge_in->flags & EDGE_CROSSING))
            {
              after = last_bb_in_partition (edge_in->src);
              before = get_last_bb_insn (after);
              /* The instruction following the last bb in partition should
                 be a barrier, since it cannot end in a fall-through.  */
              gcc_checking_assert (BARRIER_P (before));
              before = NEXT_INSN (before);
            }
          bb = create_basic_block (before, NULL, after);
          /* Put the split bb into the src partition, to avoid creating
             a situation where a cold bb dominates a hot bb, in the case
             where src is cold and dest is hot. The src will dominate
             the new bb (whereas it might not have dominated dest).  */
          BB_COPY_PARTITION (bb, edge_in->src);
        }
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* force_nonfallthru is expected to emit the jump inside BB itself
	 rather than create yet another block.  */
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && (extract_asm_operands (PATTERN (last))
		  || JUMP_LABEL (last) == before)
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
196810d565efSmrg 
196910d565efSmrg /* Queue instructions for insertion on an edge between two basic blocks.
197010d565efSmrg    The new instructions and basic blocks (if any) will not appear in the
197110d565efSmrg    CFG until commit_edge_insertions is called.  */
197210d565efSmrg 
197310d565efSmrg void
insert_insn_on_edge(rtx pattern,edge e)197410d565efSmrg insert_insn_on_edge (rtx pattern, edge e)
197510d565efSmrg {
197610d565efSmrg   /* We cannot insert instructions on an abnormal critical edge.
197710d565efSmrg      It will be easier to find the culprit if we die now.  */
197810d565efSmrg   gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
197910d565efSmrg 
198010d565efSmrg   if (e->insns.r == NULL_RTX)
198110d565efSmrg     start_sequence ();
198210d565efSmrg   else
198310d565efSmrg     push_to_sequence (e->insns.r);
198410d565efSmrg 
198510d565efSmrg   emit_insn (pattern);
198610d565efSmrg 
198710d565efSmrg   e->insns.r = get_insns ();
198810d565efSmrg   end_sequence ();
198910d565efSmrg }
199010d565efSmrg 
199110d565efSmrg /* Update the CFG for the instructions queued on edge E.  */
199210d565efSmrg 
199310d565efSmrg void
commit_one_edge_insertion(edge e)199410d565efSmrg commit_one_edge_insertion (edge e)
199510d565efSmrg {
199610d565efSmrg   rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
199710d565efSmrg   basic_block bb;
199810d565efSmrg 
199910d565efSmrg   /* Pull the insns off the edge now since the edge might go away.  */
200010d565efSmrg   insns = e->insns.r;
200110d565efSmrg   e->insns.r = NULL;
200210d565efSmrg 
200310d565efSmrg   /* Figure out where to put these insns.  If the destination has
200410d565efSmrg      one predecessor, insert there.  Except for the exit block.  */
200510d565efSmrg   if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
200610d565efSmrg     {
200710d565efSmrg       bb = e->dest;
200810d565efSmrg 
200910d565efSmrg       /* Get the location correct wrt a code label, and "nice" wrt
201010d565efSmrg 	 a basic block note, and before everything else.  */
201110d565efSmrg       tmp = BB_HEAD (bb);
201210d565efSmrg       if (LABEL_P (tmp))
201310d565efSmrg 	tmp = NEXT_INSN (tmp);
201410d565efSmrg       if (NOTE_INSN_BASIC_BLOCK_P (tmp))
201510d565efSmrg 	tmp = NEXT_INSN (tmp);
201610d565efSmrg       if (tmp == BB_HEAD (bb))
201710d565efSmrg 	before = tmp;
201810d565efSmrg       else if (tmp)
201910d565efSmrg 	after = PREV_INSN (tmp);
202010d565efSmrg       else
202110d565efSmrg 	after = get_last_insn ();
202210d565efSmrg     }
202310d565efSmrg 
202410d565efSmrg   /* If the source has one successor and the edge is not abnormal,
202510d565efSmrg      insert there.  Except for the entry block.
202610d565efSmrg      Don't do this if the predecessor ends in a jump other than
202710d565efSmrg      unconditional simple jump.  E.g. for asm goto that points all
202810d565efSmrg      its labels at the fallthru basic block, we can't insert instructions
202910d565efSmrg      before the asm goto, as the asm goto can have various of side effects,
203010d565efSmrg      and can't emit instructions after the asm goto, as it must end
203110d565efSmrg      the basic block.  */
203210d565efSmrg   else if ((e->flags & EDGE_ABNORMAL) == 0
203310d565efSmrg 	   && single_succ_p (e->src)
203410d565efSmrg 	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
203510d565efSmrg 	   && (!JUMP_P (BB_END (e->src))
203610d565efSmrg 	       || simplejump_p (BB_END (e->src))))
203710d565efSmrg     {
203810d565efSmrg       bb = e->src;
203910d565efSmrg 
204010d565efSmrg       /* It is possible to have a non-simple jump here.  Consider a target
204110d565efSmrg 	 where some forms of unconditional jumps clobber a register.  This
204210d565efSmrg 	 happens on the fr30 for example.
204310d565efSmrg 
204410d565efSmrg 	 We know this block has a single successor, so we can just emit
204510d565efSmrg 	 the queued insns before the jump.  */
204610d565efSmrg       if (JUMP_P (BB_END (bb)))
204710d565efSmrg 	before = BB_END (bb);
204810d565efSmrg       else
204910d565efSmrg 	{
205010d565efSmrg 	  /* We'd better be fallthru, or we've lost track of what's what.  */
205110d565efSmrg 	  gcc_assert (e->flags & EDGE_FALLTHRU);
205210d565efSmrg 
205310d565efSmrg 	  after = BB_END (bb);
205410d565efSmrg 	}
205510d565efSmrg     }
205610d565efSmrg 
205710d565efSmrg   /* Otherwise we must split the edge.  */
205810d565efSmrg   else
205910d565efSmrg     {
206010d565efSmrg       bb = split_edge (e);
206110d565efSmrg 
206210d565efSmrg       /* If E crossed a partition boundary, we needed to make bb end in
206310d565efSmrg          a region-crossing jump, even though it was originally fallthru.  */
206410d565efSmrg       if (JUMP_P (BB_END (bb)))
206510d565efSmrg 	before = BB_END (bb);
206610d565efSmrg       else
206710d565efSmrg         after = BB_END (bb);
206810d565efSmrg     }
206910d565efSmrg 
207010d565efSmrg   /* Now that we've found the spot, do the insertion.  */
207110d565efSmrg   if (before)
207210d565efSmrg     {
207310d565efSmrg       emit_insn_before_noloc (insns, before, bb);
207410d565efSmrg       last = prev_nonnote_insn (before);
207510d565efSmrg     }
207610d565efSmrg   else
207710d565efSmrg     last = emit_insn_after_noloc (insns, after, bb);
207810d565efSmrg 
207910d565efSmrg   if (returnjump_p (last))
208010d565efSmrg     {
208110d565efSmrg       /* ??? Remove all outgoing edges from BB and add one for EXIT.
208210d565efSmrg 	 This is not currently a problem because this only happens
208310d565efSmrg 	 for the (single) epilogue, which already has a fallthru edge
208410d565efSmrg 	 to EXIT.  */
208510d565efSmrg 
208610d565efSmrg       e = single_succ_edge (bb);
208710d565efSmrg       gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
208810d565efSmrg 		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
208910d565efSmrg 
209010d565efSmrg       e->flags &= ~EDGE_FALLTHRU;
209110d565efSmrg       emit_barrier_after (last);
209210d565efSmrg 
209310d565efSmrg       if (before)
209410d565efSmrg 	delete_insn (before);
209510d565efSmrg     }
209610d565efSmrg   else
209710d565efSmrg     gcc_assert (!JUMP_P (last));
209810d565efSmrg }
209910d565efSmrg 
210010d565efSmrg /* Update the CFG for all queued instructions.  */
210110d565efSmrg 
210210d565efSmrg void
commit_edge_insertions(void)210310d565efSmrg commit_edge_insertions (void)
210410d565efSmrg {
210510d565efSmrg   basic_block bb;
210610d565efSmrg 
210710d565efSmrg   /* Optimization passes that invoke this routine can cause hot blocks
210810d565efSmrg      previously reached by both hot and cold blocks to become dominated only
210910d565efSmrg      by cold blocks. This will cause the verification below to fail,
211010d565efSmrg      and lead to now cold code in the hot section. In some cases this
211110d565efSmrg      may only be visible after newly unreachable blocks are deleted,
211210d565efSmrg      which will be done by fixup_partitions.  */
211310d565efSmrg   fixup_partitions ();
211410d565efSmrg 
2115*ec02198aSmrg   if (!currently_expanding_to_rtl)
211610d565efSmrg     checking_verify_flow_info ();
211710d565efSmrg 
211810d565efSmrg   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
211910d565efSmrg 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
212010d565efSmrg     {
212110d565efSmrg       edge e;
212210d565efSmrg       edge_iterator ei;
212310d565efSmrg 
212410d565efSmrg       FOR_EACH_EDGE (e, ei, bb->succs)
212510d565efSmrg 	if (e->insns.r)
2126*ec02198aSmrg 	  {
2127*ec02198aSmrg 	    if (currently_expanding_to_rtl)
2128*ec02198aSmrg 	      rebuild_jump_labels_chain (e->insns.r);
212910d565efSmrg 	    commit_one_edge_insertion (e);
213010d565efSmrg 	  }
213110d565efSmrg     }
2132*ec02198aSmrg }
213310d565efSmrg 
213410d565efSmrg 
213510d565efSmrg /* Print out RTL-specific basic block information (live information
213610d565efSmrg    at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
213710d565efSmrg    documented in dumpfile.h.  */
213810d565efSmrg 
213910d565efSmrg static void
rtl_dump_bb(FILE * outf,basic_block bb,int indent,dump_flags_t flags)2140c7a68eb7Smrg rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
214110d565efSmrg {
214210d565efSmrg   char *s_indent;
214310d565efSmrg 
214410d565efSmrg   s_indent = (char *) alloca ((size_t) indent + 1);
214510d565efSmrg   memset (s_indent, ' ', (size_t) indent);
214610d565efSmrg   s_indent[indent] = '\0';
214710d565efSmrg 
214810d565efSmrg   if (df && (flags & TDF_DETAILS))
214910d565efSmrg     {
215010d565efSmrg       df_dump_top (bb, outf);
215110d565efSmrg       putc ('\n', outf);
215210d565efSmrg     }
215310d565efSmrg 
215410d565efSmrg   if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2155c7a68eb7Smrg     {
2156c7a68eb7Smrg       rtx_insn *last = BB_END (bb);
2157c7a68eb7Smrg       if (last)
2158c7a68eb7Smrg 	last = NEXT_INSN (last);
2159c7a68eb7Smrg       for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
216010d565efSmrg 	{
216110d565efSmrg 	  if (flags & TDF_DETAILS)
216210d565efSmrg 	    df_dump_insn_top (insn, outf);
216310d565efSmrg 	  if (! (flags & TDF_SLIM))
216410d565efSmrg 	    print_rtl_single (outf, insn);
216510d565efSmrg 	  else
216610d565efSmrg 	    dump_insn_slim (outf, insn);
216710d565efSmrg 	  if (flags & TDF_DETAILS)
216810d565efSmrg 	    df_dump_insn_bottom (insn, outf);
216910d565efSmrg 	}
2170c7a68eb7Smrg     }
217110d565efSmrg 
217210d565efSmrg   if (df && (flags & TDF_DETAILS))
217310d565efSmrg     {
217410d565efSmrg       df_dump_bottom (bb, outf);
217510d565efSmrg       putc ('\n', outf);
217610d565efSmrg     }
217710d565efSmrg 
217810d565efSmrg }
217910d565efSmrg 
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.

   Walks the whole insn chain starting at RTX_FIRST and interleaves
   per-block banners (TDF_BLOCKS), per-insn dataflow info (TDF_DETAILS)
   and either full or slim insn dumps (TDF_SLIM) into OUTF.  */

void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Indexed by INSN_UID: the block an insn heads / ends, and how
	 many blocks claim the insn (for the sanity notes below).
	 XCNEWVEC zero-initializes, so entries default to NULL /
	 NOT_IN_BB.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
        flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      /* First pass: record, per insn UID, which block starts/ends there,
	 and (when block banners are wanted) whether each insn belongs to
	 zero, one, or several blocks.  */
      if (cfun->curr_properties & PROP_cfg)
	{
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      if (flags & TDF_BLOCKS)
		{
		  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		    {
		      enum bb_state state = IN_MULTIPLE_BB;

		      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
			state = IN_ONE_BB;
		      in_bb_p[INSN_UID (x)] = state;

		      if (x == BB_END (bb))
			break;
		    }
		}
	    }
	}

      /* Second pass: walk the insn chain in order, emitting block
	 headers/footers around the insns they delimit.  */
      for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      /* Flag insns with suspicious block membership; notes and
		 barriers legitimately live outside any block.  */
	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  bb = end[INSN_UID (tmp_rtx)];
	  if (bb != NULL)
	    {
	      if (flags & TDF_BLOCKS)
		{
		  dump_bb_info (outf, bb, 0, dump_flags, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	      /* Emit a hint if the fallthrough target of current basic block
	         isn't the one placed right next.  */
	      else if (EDGE_COUNT (bb->succs) > 0)
		{
		  gcc_assert (BB_END (bb) == tmp_rtx);
		  const rtx_insn *ninsn = NEXT_INSN (tmp_rtx);
		  /* Bypass intervening deleted-insn notes and debug insns.  */
		  while (ninsn
			 && !NONDEBUG_INSN_P (ninsn)
			 && !start[INSN_UID (ninsn)])
		    ninsn = NEXT_INSN (ninsn);
		  edge e = find_fallthru_edge (bb->succs);
		  if (e && ninsn)
		    {
		      basic_block dest = e->dest;
		      if (start[INSN_UID (ninsn)] != dest)
			fprintf (outf, "%s      ; pc falls through to BB %d\n",
				 print_rtx_head, dest->index);
		    }
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
230010d565efSmrg 
230110d565efSmrg /* Update the branch probability of BB if a REG_BR_PROB is present.  */
230210d565efSmrg 
230310d565efSmrg void
update_br_prob_note(basic_block bb)230410d565efSmrg update_br_prob_note (basic_block bb)
230510d565efSmrg {
230610d565efSmrg   rtx note;
230710d565efSmrg   note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2308c7a68eb7Smrg   if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2309c7a68eb7Smrg     {
2310c7a68eb7Smrg       if (note)
2311c7a68eb7Smrg 	{
2312c7a68eb7Smrg 	  rtx *note_link, this_rtx;
2313c7a68eb7Smrg 
2314c7a68eb7Smrg 	  note_link = &REG_NOTES (BB_END (bb));
2315c7a68eb7Smrg 	  for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2316c7a68eb7Smrg 	    if (this_rtx == note)
2317c7a68eb7Smrg 	      {
2318c7a68eb7Smrg 		*note_link = XEXP (this_rtx, 1);
2319c7a68eb7Smrg 		break;
2320c7a68eb7Smrg 	      }
2321c7a68eb7Smrg 	}
232210d565efSmrg       return;
2323c7a68eb7Smrg     }
2324c7a68eb7Smrg   if (!note
2325c7a68eb7Smrg       || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2326c7a68eb7Smrg     return;
2327c7a68eb7Smrg   XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
232810d565efSmrg }
232910d565efSmrg 
233010d565efSmrg /* Get the last insn associated with block BB (that includes barriers and
233110d565efSmrg    tablejumps after BB).  */
233210d565efSmrg rtx_insn *
get_last_bb_insn(basic_block bb)233310d565efSmrg get_last_bb_insn (basic_block bb)
233410d565efSmrg {
233510d565efSmrg   rtx_jump_table_data *table;
233610d565efSmrg   rtx_insn *tmp;
233710d565efSmrg   rtx_insn *end = BB_END (bb);
233810d565efSmrg 
233910d565efSmrg   /* Include any jump table following the basic block.  */
234010d565efSmrg   if (tablejump_p (end, NULL, &table))
234110d565efSmrg     end = table;
234210d565efSmrg 
234310d565efSmrg   /* Include any barriers that may follow the basic block.  */
2344c7a68eb7Smrg   tmp = next_nonnote_nondebug_insn_bb (end);
234510d565efSmrg   while (tmp && BARRIER_P (tmp))
234610d565efSmrg     {
234710d565efSmrg       end = tmp;
2348c7a68eb7Smrg       tmp = next_nonnote_nondebug_insn_bb (end);
234910d565efSmrg     }
235010d565efSmrg 
235110d565efSmrg   return end;
235210d565efSmrg }
235310d565efSmrg 
2354c7a68eb7Smrg /* Add all BBs reachable from entry via hot paths into the SET.  */
2355c7a68eb7Smrg 
2356c7a68eb7Smrg void
find_bbs_reachable_by_hot_paths(hash_set<basic_block> * set)2357c7a68eb7Smrg find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2358c7a68eb7Smrg {
2359c7a68eb7Smrg   auto_vec<basic_block, 64> worklist;
2360c7a68eb7Smrg 
2361c7a68eb7Smrg   set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2362c7a68eb7Smrg   worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2363c7a68eb7Smrg 
2364c7a68eb7Smrg   while (worklist.length () > 0)
2365c7a68eb7Smrg     {
2366c7a68eb7Smrg       basic_block bb = worklist.pop ();
2367c7a68eb7Smrg       edge_iterator ei;
2368c7a68eb7Smrg       edge e;
2369c7a68eb7Smrg 
2370c7a68eb7Smrg       FOR_EACH_EDGE (e, ei, bb->succs)
2371c7a68eb7Smrg 	if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2372c7a68eb7Smrg 	    && !set->add (e->dest))
2373c7a68eb7Smrg 	  worklist.safe_push (e->dest);
2374c7a68eb7Smrg     }
2375c7a68eb7Smrg }
2376c7a68eb7Smrg 
237710d565efSmrg /* Sanity check partition hotness to ensure that basic blocks in
237810d565efSmrg    the cold partition don't dominate basic blocks in the hot partition.
237910d565efSmrg    If FLAG_ONLY is true, report violations as errors. Otherwise
238010d565efSmrg    re-mark the dominated blocks as cold, since this is run after
238110d565efSmrg    cfg optimizations that may make hot blocks previously reached
238210d565efSmrg    by both hot and cold blocks now only reachable along cold paths.  */
238310d565efSmrg 
238410d565efSmrg static vec<basic_block>
find_partition_fixes(bool flag_only)238510d565efSmrg find_partition_fixes (bool flag_only)
238610d565efSmrg {
238710d565efSmrg   basic_block bb;
238810d565efSmrg   vec<basic_block> bbs_to_fix = vNULL;
2389c7a68eb7Smrg   hash_set<basic_block> set;
239010d565efSmrg 
239110d565efSmrg   /* Callers check this.  */
239210d565efSmrg   gcc_checking_assert (crtl->has_bb_partition);
239310d565efSmrg 
2394c7a68eb7Smrg   find_bbs_reachable_by_hot_paths (&set);
2395c7a68eb7Smrg 
239610d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
2397c7a68eb7Smrg     if (!set.contains (bb)
2398c7a68eb7Smrg 	&& BB_PARTITION (bb) != BB_COLD_PARTITION)
239910d565efSmrg       {
240010d565efSmrg 	if (flag_only)
2401c7a68eb7Smrg 	  error ("non-cold basic block %d reachable only "
2402c7a68eb7Smrg 		 "by paths crossing the cold partition", bb->index);
240310d565efSmrg 	else
2404c7a68eb7Smrg 	  BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2405c7a68eb7Smrg 	bbs_to_fix.safe_push (bb);
240610d565efSmrg       }
240710d565efSmrg 
240810d565efSmrg   return bbs_to_fix;
240910d565efSmrg }
241010d565efSmrg 
/* Perform cleanup on the hot/cold bb partitioning after optimization
   passes that modify the cfg.

   Demotes hot blocks now reachable only via cold paths to the cold
   partition (see find_partition_fixes), updates region crossings, and
   if block reordering is already complete, restores the invariant that
   the block chain switches between partitions at most once.  */

void
fixup_partitions (void)
{
  if (!crtl->has_bb_partition)
    return;

  /* Delete any blocks that became unreachable and weren't
     already cleaned up, for example during edge forwarding
     and convert_jumps_to_returns. This will expose more
     opportunities for fixing the partition boundaries here.
     Also, the calculation of the dominance graph during verification
     will assert if there are unreachable nodes.  */
  delete_unreachable_blocks ();

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.
     Fixup any that now violate this requirement, as a result of edge
     forwarding and unreachable block deletion.  */
  vec<basic_block> bbs_to_fix = find_partition_fixes (false);

  /* Do the partition fixup after all necessary blocks have been converted to
     cold, so that we only update the region crossings the minimum number of
     places, which can require forcing edges to be non fallthru.  */
  if (! bbs_to_fix.is_empty ())
    {
      do
	{
	  basic_block bb = bbs_to_fix.pop ();
	  fixup_new_cold_bb (bb);
	}
      while (! bbs_to_fix.is_empty ());

      /* Fix up hot cold block grouping if needed.  */
      if (crtl->bb_reorder_complete && current_ir_type () == IR_RTL_CFGRTL)
	{
	  basic_block bb, first = NULL, second = NULL;
	  int current_partition = BB_UNPARTITIONED;

	  /* FIRST marks the start of the first out-of-place partition
	     run, SECOND the block where the partition switched back.
	     On every further switch, the run [FIRST, SECOND) is rotated
	     to sit just before BB, merging it with the like-partitioned
	     region there.  */
	  FOR_EACH_BB_FN (bb, cfun)
	    {
	      if (current_partition != BB_UNPARTITIONED
		  && BB_PARTITION (bb) != current_partition)
		{
		  if (first == NULL)
		    first = bb;
		  else if (second == NULL)
		    second = bb;
		  else
		    {
		      /* If we switch partitions for the 3rd, 5th etc. time,
			 move bbs first (inclusive) .. second (exclusive) right
			 before bb.  */
		      basic_block prev_first = first->prev_bb;
		      basic_block prev_second = second->prev_bb;
		      basic_block prev_bb = bb->prev_bb;
		      /* Splice the run out of the bb chain and re-insert
			 it before BB (three link swaps on the doubly
			 linked prev_bb/next_bb chain).  */
		      prev_first->next_bb = second;
		      second->prev_bb = prev_first;
		      prev_second->next_bb = bb;
		      bb->prev_bb = prev_second;
		      prev_bb->next_bb = first;
		      first->prev_bb = prev_bb;
		      /* Mirror exactly the same rotation on the insn
			 chain, splicing at the insns just before each
			 block head so block contents stay intact.  */
		      rtx_insn *prev_first_insn = PREV_INSN (BB_HEAD (first));
		      rtx_insn *prev_second_insn
			= PREV_INSN (BB_HEAD (second));
		      rtx_insn *prev_bb_insn = PREV_INSN (BB_HEAD (bb));
		      SET_NEXT_INSN (prev_first_insn) = BB_HEAD (second);
		      SET_PREV_INSN (BB_HEAD (second)) = prev_first_insn;
		      SET_NEXT_INSN (prev_second_insn) = BB_HEAD (bb);
		      SET_PREV_INSN (BB_HEAD (bb)) = prev_second_insn;
		      SET_NEXT_INSN (prev_bb_insn) = BB_HEAD (first);
		      SET_PREV_INSN (BB_HEAD (first)) = prev_bb_insn;
		      /* BB begins a new out-of-place run; FIRST stays,
			 awaiting the next switch-back.  */
		      second = NULL;
		    }
		}
	      current_partition = BB_PARTITION (bb);
	    }
	  /* A dangling SECOND would mean an un-rotated run remains.  */
	  gcc_assert (!second);
	}
    }
}
249410d565efSmrg 
249510d565efSmrg /* Verify, in the basic block chain, that there is at most one switch
249610d565efSmrg    between hot/cold partitions. This condition will not be true until
249710d565efSmrg    after reorder_basic_blocks is called.  */
249810d565efSmrg 
249910d565efSmrg static int
verify_hot_cold_block_grouping(void)250010d565efSmrg verify_hot_cold_block_grouping (void)
250110d565efSmrg {
250210d565efSmrg   basic_block bb;
250310d565efSmrg   int err = 0;
250410d565efSmrg   bool switched_sections = false;
250510d565efSmrg   int current_partition = BB_UNPARTITIONED;
250610d565efSmrg 
250710d565efSmrg   /* Even after bb reordering is complete, we go into cfglayout mode
250810d565efSmrg      again (in compgoto). Ensure we don't call this before going back
250910d565efSmrg      into linearized RTL when any layout fixes would have been committed.  */
251010d565efSmrg   if (!crtl->bb_reorder_complete
251110d565efSmrg       || current_ir_type () != IR_RTL_CFGRTL)
251210d565efSmrg     return err;
251310d565efSmrg 
251410d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
251510d565efSmrg     {
251610d565efSmrg       if (current_partition != BB_UNPARTITIONED
251710d565efSmrg           && BB_PARTITION (bb) != current_partition)
251810d565efSmrg 	{
251910d565efSmrg 	  if (switched_sections)
252010d565efSmrg 	    {
252110d565efSmrg 	      error ("multiple hot/cold transitions found (bb %i)",
252210d565efSmrg 		     bb->index);
252310d565efSmrg 	      err = 1;
252410d565efSmrg 	    }
252510d565efSmrg 	  else
252610d565efSmrg             switched_sections = true;
252710d565efSmrg 
252810d565efSmrg           if (!crtl->has_bb_partition)
252910d565efSmrg             error ("partition found but function partition flag not set");
253010d565efSmrg 	}
253110d565efSmrg       current_partition = BB_PARTITION (bb);
253210d565efSmrg     }
253310d565efSmrg 
253410d565efSmrg   return err;
253510d565efSmrg }
253610d565efSmrg 
253710d565efSmrg 
/* Perform several checks on the edges out of each block, such as
   the consistency of the branch probabilities, the correctness
   of hot/cold partition crossing edges, and the number of expected
   successor edges.  Also verify that the dominance relationship
   between hot/cold blocks is sane.

   Returns nonzero iff any inconsistency was reported via error ().  */

static int
rtl_verify_edges (void)
{
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Per-block counters for each category of outgoing edge.  */
      int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
      int n_eh = 0, n_abnormal = 0;
      edge e, fallthru = NULL;
      edge_iterator ei;
      rtx note;
      bool has_crossing_edge = false;

      /* Check that a REG_BR_PROB note on a conditional jump agrees
	 with the probability recorded on the branch edge.  */
      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (!BRANCH_EDGE (bb)->probability.initialized_p ())
	    {
	      if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
		{
		  error ("verify_flow_info: "
			 "REG_BR_PROB is set but cfg probability is not");
		  err = 1;
		}
	    }
	  else if (XINT (note, 0)
	           != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
	           && profile_status_for_fn (cfun) != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
		     XINT (note, 0),
		     BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
	      err = 1;
	    }
	}

      /* Classify and sanity-check each successor edge.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  bool is_crossing;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++, fallthru = e;

	  /* An edge "crosses" when its endpoints live in different
	     hot/cold partitions (entry/exit excluded).  Its
	     EDGE_CROSSING flag must agree with that fact.  */
	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
          has_crossing_edge |= is_crossing;
	  if (e->flags & EDGE_CROSSING)
	    {
	      if (!is_crossing)
		{
		  error ("EDGE_CROSSING incorrectly set across same section");
		  err = 1;
		}
	      /* Fallthru and EH edges may never cross sections, and a
		 section-crossing jump must be marked as such.  */
	      if (e->flags & EDGE_FALLTHRU)
		{
		  error ("fallthru edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (e->flags & EDGE_EH)
		{
		  error ("EH edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
              if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
		{
		  error ("No region crossing jump at section boundary in bb %i",
			 bb->index);
		  err = 1;
		}
	    }
	  else if (is_crossing)
	    {
	      error ("EDGE_CROSSING missing across section boundary");
	      err = 1;
	    }

	  /* An edge with none but these "decoration" flags set is an
	     ordinary branch edge.  */
	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING
			    | EDGE_PRESERVE)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_abnormal_call++;

	  if (e->flags & EDGE_SIBCALL)
	    n_sibcall++;

	  if (e->flags & EDGE_EH)
	    n_eh++;

	  if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      /* Conversely: a crossing jump insn with no crossing edge.  */
      if (!has_crossing_edge
	  && JUMP_P (BB_END (bb))
	  && CROSSING_JUMP_P (BB_END (bb)))
	{
	  print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
	  error ("Region crossing jump across same section in bb %i",
		 bb->index);
	  err = 1;
	}

      /* Now check the counters against what the block's last insn
	 permits.  */
      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_eh > 1)
	{
	  error ("too many exception handling edges in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong number of branch edges after unconditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      /* NOTE(review): FALLTHRU is dereferenced here without a NULL
	 check; this relies on every conditional jump having a fallthru
	 successor (checked elsewhere) — confirm before relying on it.  */
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal_call && !CALL_P (BB_END (bb)))
	{
	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_sibcall && !CALL_P (BB_END (bb)))
	{
	  error ("sibcall edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal > n_eh
	  && !(CALL_P (BB_END (bb))
	       && n_abnormal == n_abnormal_call + n_sibcall)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}

      /* Incoming edges must be uniformly EH or uniformly non-EH;
	 HAS_EH latches the first edge's EH-ness for comparison.  */
      int has_eh = -1;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (has_eh == -1)
	    has_eh = (e->flags & EDGE_EH);
	  if ((e->flags & EDGE_EH) == has_eh)
	    continue;
	  error ("EH incoming edge mixed with non-EH incoming edges "
		 "in bb %i", bb->index);
	  err = 1;
	  break;
	}
    }

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.  */
  if (crtl->has_bb_partition && !err
      && current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      vec<basic_block> bbs_to_fix = find_partition_fixes (true);
      err = !bbs_to_fix.is_empty ();
    }

  /* Clean up.  */
  return err;
}
274110d565efSmrg 
274210d565efSmrg /* Checks on the instructions within blocks. Currently checks that each
274310d565efSmrg    block starts with a basic block note, and that basic block notes and
274410d565efSmrg    control flow jumps are not found in the middle of the block.  */
274510d565efSmrg 
static int
rtl_verify_bb_insns (void)
{
  rtx_insn *x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  /* If the label is also BB_END, there is no room left for the
	     basic block note, so it must be missing.  */
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      /* The note must be present and must point back at this block.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	/* Scan the body of the block: no further basic block notes may
	   appear, and control flow insns may occur only at BB_END.  */
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    /* A control flow insn before BB_END cannot be repaired;
	       fatal_insn does not return.  */
	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
280510d565efSmrg 
280610d565efSmrg /* Verify that block pointers for instructions in basic blocks, headers and
280710d565efSmrg    footers are set appropriately.  */
280810d565efSmrg 
280910d565efSmrg static int
rtl_verify_bb_pointers(void)281010d565efSmrg rtl_verify_bb_pointers (void)
281110d565efSmrg {
281210d565efSmrg   int err = 0;
281310d565efSmrg   basic_block bb;
281410d565efSmrg 
281510d565efSmrg   /* Check the general integrity of the basic blocks.  */
281610d565efSmrg   FOR_EACH_BB_REVERSE_FN (bb, cfun)
281710d565efSmrg     {
281810d565efSmrg       rtx_insn *insn;
281910d565efSmrg 
282010d565efSmrg       if (!(bb->flags & BB_RTL))
282110d565efSmrg 	{
282210d565efSmrg 	  error ("BB_RTL flag not set for block %d", bb->index);
282310d565efSmrg 	  err = 1;
282410d565efSmrg 	}
282510d565efSmrg 
282610d565efSmrg       FOR_BB_INSNS (bb, insn)
282710d565efSmrg 	if (BLOCK_FOR_INSN (insn) != bb)
282810d565efSmrg 	  {
282910d565efSmrg 	    error ("insn %d basic block pointer is %d, should be %d",
283010d565efSmrg 		   INSN_UID (insn),
283110d565efSmrg 		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
283210d565efSmrg 		   bb->index);
283310d565efSmrg 	    err = 1;
283410d565efSmrg 	  }
283510d565efSmrg 
283610d565efSmrg       for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
283710d565efSmrg 	if (!BARRIER_P (insn)
283810d565efSmrg 	    && BLOCK_FOR_INSN (insn) != NULL)
283910d565efSmrg 	  {
284010d565efSmrg 	    error ("insn %d in header of bb %d has non-NULL basic block",
284110d565efSmrg 		   INSN_UID (insn), bb->index);
284210d565efSmrg 	    err = 1;
284310d565efSmrg 	  }
284410d565efSmrg       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
284510d565efSmrg 	if (!BARRIER_P (insn)
284610d565efSmrg 	    && BLOCK_FOR_INSN (insn) != NULL)
284710d565efSmrg 	  {
284810d565efSmrg 	    error ("insn %d in footer of bb %d has non-NULL basic block",
284910d565efSmrg 		   INSN_UID (insn), bb->index);
285010d565efSmrg 	    err = 1;
285110d565efSmrg 	  }
285210d565efSmrg     }
285310d565efSmrg 
285410d565efSmrg   /* Clean up.  */
285510d565efSmrg   return err;
285610d565efSmrg }
285710d565efSmrg 
285810d565efSmrg /* Verify the CFG and RTL consistency common for both underlying RTL and
285910d565efSmrg    cfglayout RTL.
286010d565efSmrg 
286110d565efSmrg    Currently it does following checks:
286210d565efSmrg 
286310d565efSmrg    - overlapping of basic blocks
286410d565efSmrg    - insns with wrong BLOCK_FOR_INSN pointers
286510d565efSmrg    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
286610d565efSmrg    - tails of basic blocks (ensure that boundary is necessary)
286710d565efSmrg    - scans body of the basic block for JUMP_INSN, CODE_LABEL
286810d565efSmrg      and NOTE_INSN_BASIC_BLOCK
286910d565efSmrg    - verify that no fall_thru edge crosses hot/cold partition boundaries
287010d565efSmrg    - verify that there are no pending RTL branch predictions
287110d565efSmrg    - verify that hot blocks are not dominated by cold blocks
287210d565efSmrg 
287310d565efSmrg    In future it can be extended check a lot of other stuff as well
287410d565efSmrg    (reachability of basic blocks, life information, etc. etc.).  */
287510d565efSmrg 
static int
rtl_verify_flow_info_1 (void)
{
  /* Run the three common verifiers and accumulate their error flags;
     all of them always run regardless of earlier failures.  */
  int err = rtl_verify_bb_pointers ();
  err |= rtl_verify_bb_insns ();
  err |= rtl_verify_edges ();
  return err;
}
288910d565efSmrg 
289010d565efSmrg /* Walk the instruction chain and verify that bb head/end pointers
289110d565efSmrg   are correct, and that instructions are in exactly one bb and have
289210d565efSmrg   correct block pointers.  */
289310d565efSmrg 
static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  /* LAST_HEAD tracks the insn just before the head of the previously
     verified block; the walk proceeds backwards through the stream.  */
  rtx_insn *last_head = get_last_insn ();
  /* Map from INSN_UID to the block claiming that insn, to detect insns
     appearing in more than one block.  */
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *head = BB_HEAD (bb);
      rtx_insn *end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* Continue the outer backward walk from just before this block's
	 head.  */
      last_head = PREV_INSN (x);
    }

  /* Whatever remains before the first block's head must also have a
     NULL bb field.  */
  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}
297810d565efSmrg 
297910d565efSmrg /* Verify that fallthru edges point to adjacent blocks in layout order and
298010d565efSmrg    that barriers exist after non-fallthru blocks.  */
298110d565efSmrg 
static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx_insn *insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      /* Reaching the end of the stream or the next block's note
		 without seeing a barrier is an error.  */
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx_insn *insn;

	  /* A fallthru edge between real blocks must connect adjacent
	     blocks in layout order.  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    /* And nothing "real" may sit between the two blocks: only
	       notes and debug insns are permitted; fatal_insn does not
	       return.  */
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

   return err;
}
303710d565efSmrg 
303810d565efSmrg /* Verify that blocks are laid out in consecutive order. While walking the
303910d565efSmrg    instructions, verify that all expected instructions are inside the basic
304010d565efSmrg    blocks, and that all returns are followed by barriers.  */
304110d565efSmrg 
static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x, *y;
  int num_bb_notes;
  rtx_insn * const rtx_first = get_insns ();
  /* CURR_BB is the block we are currently inside, or NULL when between
     blocks; LAST_BB_SEEN checks consecutive layout order.  */
  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;

  num_bb_notes = 0;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  /* Between blocks, only a limited set of insns may appear.  */
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      /* Every unconditional return must be directly followed by a
	 barrier (ignoring notes and debug insns).  */
      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! ((y = next_nonnote_nondebug_insn (x))
		&& BARRIER_P (y)))
	    fatal_insn ("return not followed by barrier", x);

      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks_for_fn (cfun));

   return err;
}
310610d565efSmrg 
310710d565efSmrg /* Verify the CFG and RTL consistency common for both underlying RTL and
310810d565efSmrg    cfglayout RTL, plus consistency checks specific to linearized RTL mode.
310910d565efSmrg 
311010d565efSmrg    Currently it does following checks:
311110d565efSmrg    - all checks of rtl_verify_flow_info_1
311210d565efSmrg    - test head/end pointers
311310d565efSmrg    - check that blocks are laid out in consecutive order
311410d565efSmrg    - check that all insns are in the basic blocks
311510d565efSmrg      (except the switch handling code, barriers and notes)
311610d565efSmrg    - check that all returns are followed by barriers
311710d565efSmrg    - check that all fallthru edge points to the adjacent blocks
311810d565efSmrg    - verify that there is a single hot/cold partition boundary after bbro  */
311910d565efSmrg 
static int
rtl_verify_flow_info (void)
{
  /* Common CFG/RTL checks first, then the checks that only apply to
     linearized (non-cfglayout) RTL; every checker always runs.  */
  int err = rtl_verify_flow_info_1 ();
  err |= rtl_verify_bb_insn_chain ();
  err |= rtl_verify_fallthru ();
  err |= rtl_verify_bb_layout ();
  err |= verify_hot_cold_block_grouping ();
  return err;
}
313710d565efSmrg 
313810d565efSmrg /* Assume that the preceding pass has possibly eliminated jump instructions
313910d565efSmrg    or converted the unconditional jumps.  Eliminate the edges from CFG.
314010d565efSmrg    Return true if any edges are eliminated.  */
314110d565efSmrg 
bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx_insn *insn = BB_END (bb);
  rtx note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* Skip trailing debug insns and notes so INSN is the last "real"
     insn of the block (unless the block contains nothing else).  */
  if ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = profile_probability::always ();
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = profile_probability::from_reg_br_prob_note
					 (XINT (note, 0));
	  f->probability = b->probability.invert ();
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = profile_probability::always ();

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
335510d565efSmrg 
335610d565efSmrg /* Search all basic blocks for potentially dead edges and purge them.  Return
335710d565efSmrg    true if some edge has been eliminated.  */
335810d565efSmrg 
335910d565efSmrg bool
purge_all_dead_edges(void)336010d565efSmrg purge_all_dead_edges (void)
336110d565efSmrg {
336210d565efSmrg   int purged = false;
336310d565efSmrg   basic_block bb;
336410d565efSmrg 
336510d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
336610d565efSmrg     {
336710d565efSmrg       bool purged_here = purge_dead_edges (bb);
336810d565efSmrg 
336910d565efSmrg       purged |= purged_here;
337010d565efSmrg     }
337110d565efSmrg 
337210d565efSmrg   return purged;
337310d565efSmrg }
337410d565efSmrg 
337510d565efSmrg /* This is used by a few passes that emit some instructions after abnormal
337610d565efSmrg    calls, moving the basic block's end, while they in fact do want to emit
337710d565efSmrg    them on the fallthru edge.  Look for abnormal call edges, find backward
337810d565efSmrg    the call in the block and insert the instructions on the edge instead.
337910d565efSmrg 
338010d565efSmrg    Similarly, handle instructions throwing exceptions internally.
338110d565efSmrg 
338210d565efSmrg    Return true when instructions have been found and inserted on edges.  */
338310d565efSmrg 
bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
         exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* Only act when such an edge exists but BB_END is neither the
	 call nor a throwing insn -- i.e. insns were emitted after the
	 insn that justified the edge.  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx_insn *insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx_insn *stop, *next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block at the call/throwing insn and move
		 everything after it onto the fallthru edge.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.
			 Similarly, sometimes a call that can throw is
			 followed in the source with __builtin_unreachable (),
			 meaning that there is UB if the call returns rather
			 than throws.  If there weren't any instructions
			 following such calls before, supposedly even the ones
			 we've deleted aren't significant and can be
			 removed.  */
		      if (e)
			{
			  /* We're not deleting it, we're moving it.  */
			  insn->set_undeleted ();
			  SET_PREV_INSN (insn) = NULL_RTX;
			  SET_NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
347010d565efSmrg 
347110d565efSmrg /* Cut the insns from FIRST to LAST out of the insns stream.  */
347210d565efSmrg 
347310d565efSmrg rtx_insn *
unlink_insn_chain(rtx_insn * first,rtx_insn * last)347410d565efSmrg unlink_insn_chain (rtx_insn *first, rtx_insn *last)
347510d565efSmrg {
347610d565efSmrg   rtx_insn *prevfirst = PREV_INSN (first);
347710d565efSmrg   rtx_insn *nextlast = NEXT_INSN (last);
347810d565efSmrg 
347910d565efSmrg   SET_PREV_INSN (first) = NULL;
348010d565efSmrg   SET_NEXT_INSN (last) = NULL;
348110d565efSmrg   if (prevfirst)
348210d565efSmrg     SET_NEXT_INSN (prevfirst) = nextlast;
348310d565efSmrg   if (nextlast)
348410d565efSmrg     SET_PREV_INSN (nextlast) = prevfirst;
348510d565efSmrg   else
348610d565efSmrg     set_last_insn (prevfirst);
348710d565efSmrg   if (!prevfirst)
348810d565efSmrg     set_first_insn (nextlast);
348910d565efSmrg   return first;
349010d565efSmrg }
349110d565efSmrg 
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers). If there are any such insns,
   we return the last one. Otherwise, we return the end of BB.  */

static rtx_insn *
skip_insns_after_block (basic_block bb)
{
  rtx_insn *insn, *last_insn, *next_head, *prev;

  /* NEXT_HEAD is the first insn of the following block (if any); the
     scan below must never walk into it.  */
  next_head = NULL;
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    next_head = BB_HEAD (bb->next_bb);

  /* Scan forward from BB_END, extending LAST_INSN over trailing insns
     that belong with BB.  NOTE: the `continue's inside the switch
     resume this for loop, not the switch itself.  */
  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* A barrier trails the control-flow insn ending BB; keep it
	     associated with BB.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      /* BLOCK_END notes are not expected here; see the
		 contradictory-sequence comment below.  */
	      gcc_unreachable ();
	      continue;
	    default:
	      /* Skip over other notes without claiming them for BB.  */
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label immediately followed by jump table data is the
	     dispatch table of a tablejump; keep both with BB.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      /* Anything else terminates the scan.  */
      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	  case NOTE_INSN_DELETED_DEBUG_LABEL:
	    /* Deleted markers may stay where they are.  */
	    continue;
	  default:
	    /* Move any other note after LAST_INSN, so the insns kept
	       with BB form a contiguous run.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
357510d565efSmrg 
357610d565efSmrg /* Locate or create a label for a given basic block.  */
357710d565efSmrg 
357810d565efSmrg static rtx_insn *
label_for_bb(basic_block bb)357910d565efSmrg label_for_bb (basic_block bb)
358010d565efSmrg {
358110d565efSmrg   rtx_insn *label = BB_HEAD (bb);
358210d565efSmrg 
358310d565efSmrg   if (!LABEL_P (label))
358410d565efSmrg     {
358510d565efSmrg       if (dump_file)
358610d565efSmrg 	fprintf (dump_file, "Emitting label for block %d\n", bb->index);
358710d565efSmrg 
358810d565efSmrg       label = block_label (bb);
358910d565efSmrg     }
359010d565efSmrg 
359110d565efSmrg   return label;
359210d565efSmrg }
359310d565efSmrg 
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx_insn *next_insn;
  basic_block bb;
  rtx_insn *insn;

  /* Find the NOTE_INSN_BASIC_BLOCK of the first block, skipping the
     notes that precede it.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  /* Detach everything before the first block into the function
     header.  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL;

  /* For each block, detach the insns between the previous block's end
     and BB_HEAD into BB_HEADER, and the trailing insns associated with
     the block (per skip_insns_after_block) into BB_FOOTER.  */
  next_insn = get_insns ();
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
						PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever follows the last block becomes the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
363710d565efSmrg 
namespace {

/* Pass descriptor for the "into_cfglayout" pass, which advertises
   PROP_cfglayout as a provided property.  */

const pass_data pass_data_into_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* RTL pass that switches the function into cfglayout mode.  */

class pass_into_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_into_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  /* Enter cfglayout mode with no extra initialization flags.  */
  virtual unsigned int execute (function *)
    {
      cfg_layout_initialize (0);
      return 0;
    }

}; // class pass_into_cfg_layout_mode

} // anon namespace
367010d565efSmrg 
367110d565efSmrg rtl_opt_pass *
make_pass_into_cfg_layout_mode(gcc::context * ctxt)367210d565efSmrg make_pass_into_cfg_layout_mode (gcc::context *ctxt)
367310d565efSmrg {
367410d565efSmrg   return new pass_into_cfg_layout_mode (ctxt);
367510d565efSmrg }
367610d565efSmrg 
namespace {

/* Pass descriptor for the "outof_cfglayout" pass, which destroys the
   PROP_cfglayout property.  */

const pass_data pass_data_outof_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* RTL pass that switches the function back out of cfglayout mode.  */

class pass_outof_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_outof_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_outof_cfg_layout_mode

/* Leave cfglayout mode.  Record the current linear block order in the
   bb->aux chain so cfg_layout_finalize can reproduce it.  */

unsigned int
pass_outof_cfg_layout_mode::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}

} // anon namespace
371910d565efSmrg 
372010d565efSmrg rtl_opt_pass *
make_pass_outof_cfg_layout_mode(gcc::context * ctxt)372110d565efSmrg make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
372210d565efSmrg {
372310d565efSmrg   return new pass_outof_cfg_layout_mode (ctxt);
372410d565efSmrg }
372510d565efSmrg 
372610d565efSmrg 
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
	   NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i\n",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation\n");
	  else
	    fprintf (dump_file, "bb %i\n", bb->index);
	}
    }

  /* Now reorder the blocks.  Walk the aux chain and rewrite the
     prev_bb/next_bb links to match it.  */
  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  /* Close the chain onto the exit block.  */
  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  if (original_copy_tables_initialized_p ())
    free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
380010d565efSmrg 
380110d565efSmrg 
/* Given a reorder chain, rearrange the code to match.

   The desired block order is given by the bb->aux chain (as set up in
   cfglayout mode).  This routine first re-links the insn stream to
   follow that order, stitching in the saved function header/footer and
   per-block headers/footers, and then patches jumps, inserts barriers
   and forwarder blocks so control flow matches the new layout.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx_insn *insn = NULL;

  /* Re-attach the saved function header and leave INSN pointing at the
     last insn emitted so far.  */
  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
       bb->aux)
    {
      /* Splice in the block's saved header insns, if any.  */
      if (BB_HEADER (bb))
	{
	  if (insn)
	    SET_NEXT_INSN (insn) = BB_HEADER (bb);
	  else
	    set_first_insn (BB_HEADER (bb));
	  SET_PREV_INSN (BB_HEADER (bb)) = insn;
	  insn = BB_HEADER (bb);
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      /* Then the block body itself ...  */
      if (insn)
	SET_NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      SET_PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      /* ... and its saved footer insns.  */
      if (BB_FOOTER (bb))
	{
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  /* Finish with the saved function footer and fix the last-insn
     anchor.  */
  SET_NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    SET_PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
  if (flag_checking)
    verify_insn_chain ();

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
       bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx_insn *bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_jump);
	  if (any_condjump_p (bb_end_jump))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_jump)
			      || returnjump_p (bb_end_jump)
                              || (e_taken->flags & EDGE_CROSSING));
		  emit_barrier_after (bb_end_jump);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);

		  /* Invert the jump only when the branch is predicted
		     not-taken, so the likelier path falls through.  */
		  if (note
		      && profile_probability::from_reg_br_prob_note
				 (XINT (note, 0)) < profile_probability::even ()
		      && invert_jump (bb_end_jump,
				      (e_fall->dest
				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      /* The fallthru and taken edges swapped roles.  */
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_jump,
				    (e_fall->dest
				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  /* Drop the old jump target label if it became
		     unused.  */
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (as_a<rtx_insn *> (ret_label));
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  /* Thread the new forwarder block into the aux chain right
	     after BB.  */
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus in RTL
     when not optimizing.  */
  if (!optimize && !DECL_IGNORED_P (current_function_decl))
    FOR_EACH_BB_FN (bb, cfun)
      {
        edge e;
        edge_iterator ei;

        FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx_insn *end;

	      /* If the last real insn of the source block already
		 carries the edge's locus, nothing to do.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      /* A simple jump without a location can just adopt the
		 edge's locus.  */
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* Likewise if the first real insn of the destination
		     already has this locus.  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      /* Otherwise split the edge and give the new block (at
		 least a nop) carrying the goto_locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
							 nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
409810d565efSmrg 
409910d565efSmrg /* Perform sanity checks on the insn chain.
410010d565efSmrg    1. Check that next/prev pointers are consistent in both the forward and
410110d565efSmrg       reverse direction.
410210d565efSmrg    2. Count insns in chain, going both directions, and check if equal.
410310d565efSmrg    3. Check that get_last_insn () returns the actual end of chain.  */
410410d565efSmrg 
410510d565efSmrg DEBUG_FUNCTION void
verify_insn_chain(void)410610d565efSmrg verify_insn_chain (void)
410710d565efSmrg {
410810d565efSmrg   rtx_insn *x, *prevx, *nextx;
410910d565efSmrg   int insn_cnt1, insn_cnt2;
411010d565efSmrg 
411110d565efSmrg   for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
411210d565efSmrg        x != 0;
411310d565efSmrg        prevx = x, insn_cnt1++, x = NEXT_INSN (x))
411410d565efSmrg     gcc_assert (PREV_INSN (x) == prevx);
411510d565efSmrg 
411610d565efSmrg   gcc_assert (prevx == get_last_insn ());
411710d565efSmrg 
411810d565efSmrg   for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
411910d565efSmrg        x != 0;
412010d565efSmrg        nextx = x, insn_cnt2++, x = PREV_INSN (x))
412110d565efSmrg     gcc_assert (NEXT_INSN (x) == nextx);
412210d565efSmrg 
412310d565efSmrg   gcc_assert (insn_cnt1 == insn_cnt2);
412410d565efSmrg }
412510d565efSmrg 
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  /* The bb->aux chain holds the intended block order; BB is out of
     place iff something still follows it in that chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block_after_labels (bb)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Unlink BB from its position in the aux chain ...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ... walk to the current last block ...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ... and append BB there, so it precedes the exit block.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
417010d565efSmrg 
417110d565efSmrg /* In case there are more than one fallthru predecessors of exit, force that
417210d565efSmrg    there is only one.  */
417310d565efSmrg 
417410d565efSmrg static void
force_one_exit_fallthru(void)417510d565efSmrg force_one_exit_fallthru (void)
417610d565efSmrg {
417710d565efSmrg   edge e, predecessor = NULL;
417810d565efSmrg   bool more = false;
417910d565efSmrg   edge_iterator ei;
418010d565efSmrg   basic_block forwarder, bb;
418110d565efSmrg 
418210d565efSmrg   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
418310d565efSmrg     if (e->flags & EDGE_FALLTHRU)
418410d565efSmrg       {
418510d565efSmrg 	if (predecessor == NULL)
418610d565efSmrg 	  predecessor = e;
418710d565efSmrg 	else
418810d565efSmrg 	  {
418910d565efSmrg 	    more = true;
419010d565efSmrg 	    break;
419110d565efSmrg 	  }
419210d565efSmrg       }
419310d565efSmrg 
419410d565efSmrg   if (!more)
419510d565efSmrg     return;
419610d565efSmrg 
419710d565efSmrg   /* Exit has several fallthru predecessors.  Create a forwarder block for
419810d565efSmrg      them.  */
419910d565efSmrg   forwarder = split_edge (predecessor);
420010d565efSmrg   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
420110d565efSmrg        (e = ei_safe_edge (ei)); )
420210d565efSmrg     {
420310d565efSmrg       if (e->src == forwarder
420410d565efSmrg 	  || !(e->flags & EDGE_FALLTHRU))
420510d565efSmrg 	ei_next (&ei);
420610d565efSmrg       else
420710d565efSmrg 	redirect_edge_and_branch_force (e, forwarder);
420810d565efSmrg     }
420910d565efSmrg 
421010d565efSmrg   /* Fix up the chain of blocks -- make FORWARDER immediately precede the
421110d565efSmrg      exit block.  */
421210d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
421310d565efSmrg     {
421410d565efSmrg       if (bb->aux == NULL && bb != forwarder)
421510d565efSmrg 	{
421610d565efSmrg 	  bb->aux = forwarder;
421710d565efSmrg 	  break;
421810d565efSmrg 	}
421910d565efSmrg     }
422010d565efSmrg }
422110d565efSmrg 
422210d565efSmrg /* Return true in case it is possible to duplicate the basic block BB.  */
422310d565efSmrg 
422410d565efSmrg static bool
cfg_layout_can_duplicate_bb_p(const_basic_block bb)422510d565efSmrg cfg_layout_can_duplicate_bb_p (const_basic_block bb)
422610d565efSmrg {
422710d565efSmrg   /* Do not attempt to duplicate tablejumps, as we need to unshare
422810d565efSmrg      the dispatch table.  This is difficult to do, as the instructions
422910d565efSmrg      computing jump destination may be hoisted outside the basic block.  */
423010d565efSmrg   if (tablejump_p (BB_END (bb), NULL, NULL))
423110d565efSmrg     return false;
423210d565efSmrg 
423310d565efSmrg   /* Do not duplicate blocks containing insns that can't be copied.  */
423410d565efSmrg   if (targetm.cannot_copy_insn_p)
423510d565efSmrg     {
423610d565efSmrg       rtx_insn *insn = BB_HEAD (bb);
423710d565efSmrg       while (1)
423810d565efSmrg 	{
423910d565efSmrg 	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
424010d565efSmrg 	    return false;
424110d565efSmrg 	  if (insn == BB_END (bb))
424210d565efSmrg 	    break;
424310d565efSmrg 	  insn = NEXT_INSN (insn);
424410d565efSmrg 	}
424510d565efSmrg     }
424610d565efSmrg 
424710d565efSmrg   return true;
424810d565efSmrg }
424910d565efSmrg 
/* Duplicate the insns FROM..TO (inclusive), emitting the copies at the end
   of the current insn stream.  Return the first copied insn.  Dispatch
   tables, code labels and most notes are deliberately not copied.  */

rtx_insn *
duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn, *next, *copy;
  rtx_note *last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (DEBUG_BIND_INSN_P (insn)
	      && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  /* Keep (simple) return jumps pointing at their return rtx;
	     emit_copy_of_insn_after does not preserve that.  */
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case JUMP_TABLE_DATA:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.
	     Avoid copying following barrier as well if any
	     (and debug insns in between).  */
	  for (next = NEXT_INSN (insn);
	       next != NEXT_INSN (to);
	       next = NEXT_INSN (next))
	    if (!DEBUG_INSN_P (next))
	      break;
	  if (next != NEXT_INSN (to) && BARRIER_P (next))
	    insn = next;
	  break;

	case CODE_LABEL:
	  /* Labels are never copied; jumps into the copy are fixed up
	     by the callers.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case the prologue is empty and the function contains a
		 label in the first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      /* We should only switch text sections once.  */
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
	      emit_note_copy (as_a <rtx_note *> (insn));
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* The copies start right after the temporary note; delete the note and
     return the first real copy.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
434010d565efSmrg 
434110d565efSmrg /* Create a duplicate of the basic block BB.  */
434210d565efSmrg 
static basic_block
cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *)
{
  rtx_insn *insn;
  basic_block new_bb;

  /* Copy BB's insn chain and build the new block from the copies, placing
     it just before the exit block.  The copy_bb_data argument is unused by
     the RTL hook.  */
  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  /* Duplicate the header chain, if any: walk to its last insn, copy the
     whole chain, then unlink the copies from the main insn stream.  */
  if (BB_HEADER (bb))
    {
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  /* Likewise for the footer chain.  */
  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
437710d565efSmrg 
437810d565efSmrg 
437910d565efSmrg /* Main entry point to this module - initialize the datastructures for
438010d565efSmrg    CFG layout changes.  It keeps LOOPS up-to-date if not null.
438110d565efSmrg 
438210d565efSmrg    FLAGS is a set of additional flags to pass to cleanup_cfg().  */
438310d565efSmrg 
438410d565efSmrg void
cfg_layout_initialize(int flags)4385c7a68eb7Smrg cfg_layout_initialize (int flags)
438610d565efSmrg {
438710d565efSmrg   rtx_insn_list *x;
438810d565efSmrg   basic_block bb;
438910d565efSmrg 
439010d565efSmrg   /* Once bb partitioning is complete, cfg layout mode should not be
439110d565efSmrg      re-entered.  Entering cfg layout mode may require fixups.  As an
439210d565efSmrg      example, if edge forwarding performed when optimizing the cfg
439310d565efSmrg      layout required moving a block from the hot to the cold
439410d565efSmrg      section. This would create an illegal partitioning unless some
439510d565efSmrg      manual fixup was performed.  */
4396c7a68eb7Smrg   gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
439710d565efSmrg 
439810d565efSmrg   initialize_original_copy_tables ();
439910d565efSmrg 
440010d565efSmrg   cfg_layout_rtl_register_cfg_hooks ();
440110d565efSmrg 
440210d565efSmrg   record_effective_endpoints ();
440310d565efSmrg 
440410d565efSmrg   /* Make sure that the targets of non local gotos are marked.  */
440510d565efSmrg   for (x = nonlocal_goto_handler_labels; x; x = x->next ())
440610d565efSmrg     {
440710d565efSmrg       bb = BLOCK_FOR_INSN (x->insn ());
440810d565efSmrg       bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
440910d565efSmrg     }
441010d565efSmrg 
441110d565efSmrg   cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
441210d565efSmrg }
441310d565efSmrg 
441410d565efSmrg /* Splits superblocks.  */
441510d565efSmrg void
break_superblocks(void)441610d565efSmrg break_superblocks (void)
441710d565efSmrg {
441810d565efSmrg   bool need = false;
441910d565efSmrg   basic_block bb;
442010d565efSmrg 
442110d565efSmrg   auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
442210d565efSmrg   bitmap_clear (superblocks);
442310d565efSmrg 
442410d565efSmrg   FOR_EACH_BB_FN (bb, cfun)
442510d565efSmrg     if (bb->flags & BB_SUPERBLOCK)
442610d565efSmrg       {
442710d565efSmrg 	bb->flags &= ~BB_SUPERBLOCK;
442810d565efSmrg 	bitmap_set_bit (superblocks, bb->index);
442910d565efSmrg 	need = true;
443010d565efSmrg       }
443110d565efSmrg 
443210d565efSmrg   if (need)
443310d565efSmrg     {
443410d565efSmrg       rebuild_jump_labels (get_insns ());
443510d565efSmrg       find_many_sub_basic_blocks (superblocks);
443610d565efSmrg     }
443710d565efSmrg }
443810d565efSmrg 
443910d565efSmrg /* Finalize the changes: reorder insn list according to the sequence specified
444010d565efSmrg    by aux pointers, enter compensation code, rebuild scope forest.  */
444110d565efSmrg 
void
cfg_layout_finalize (void)
{
  /* Dominance information is not kept up to date across the
     reordering.  */
  free_dominance_info (CDI_DOMINATORS);
  /* Make sure the exit block has at most one fallthru predecessor before
     linearizing.  */
  force_one_exit_fallthru ();
  /* Switch back from the cfglayout hooks to the plain RTL hooks.  */
  rtl_register_cfg_hooks ();
  if (reload_completed && !targetm.have_epilogue ())
    fixup_fallthru_exit_predecessor ();
  /* Reorder the insn stream to match the order given by the aux
     pointers, inserting compensation jumps where needed.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

  if (flag_checking)
    verify_insn_chain ();
  checking_verify_flow_info ();
}
445910d565efSmrg 
446010d565efSmrg 
446110d565efSmrg /* Same as split_block but update cfg_layout structures.  */
446210d565efSmrg 
446310d565efSmrg static basic_block
cfg_layout_split_block(basic_block bb,void * insnp)446410d565efSmrg cfg_layout_split_block (basic_block bb, void *insnp)
446510d565efSmrg {
446610d565efSmrg   rtx insn = (rtx) insnp;
446710d565efSmrg   basic_block new_bb = rtl_split_block (bb, insn);
446810d565efSmrg 
446910d565efSmrg   BB_FOOTER (new_bb) = BB_FOOTER (bb);
447010d565efSmrg   BB_FOOTER (bb) = NULL;
447110d565efSmrg 
447210d565efSmrg   return new_bb;
447310d565efSmrg }
447410d565efSmrg 
447510d565efSmrg /* Redirect Edge to DEST.  */
447610d565efSmrg static edge
cfg_layout_redirect_edge_and_branch(edge e,basic_block dest)447710d565efSmrg cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
447810d565efSmrg {
447910d565efSmrg   basic_block src = e->src;
448010d565efSmrg   edge ret;
448110d565efSmrg 
448210d565efSmrg   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
448310d565efSmrg     return NULL;
448410d565efSmrg 
448510d565efSmrg   if (e->dest == dest)
448610d565efSmrg     return e;
448710d565efSmrg 
4488c7a68eb7Smrg   if (e->flags & EDGE_CROSSING
4489c7a68eb7Smrg       && BB_PARTITION (e->src) == BB_PARTITION (dest)
4490c7a68eb7Smrg       && simplejump_p (BB_END (src)))
4491c7a68eb7Smrg     {
4492c7a68eb7Smrg       if (dump_file)
4493c7a68eb7Smrg 	fprintf (dump_file,
4494c7a68eb7Smrg 	  	 "Removing crossing jump while redirecting edge form %i to %i\n",
4495c7a68eb7Smrg 		 e->src->index, dest->index);
4496c7a68eb7Smrg       delete_insn (BB_END (src));
4497c7a68eb7Smrg       remove_barriers_from_footer (src);
4498c7a68eb7Smrg       e->flags |= EDGE_FALLTHRU;
4499c7a68eb7Smrg     }
4500c7a68eb7Smrg 
450110d565efSmrg   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
450210d565efSmrg       && (ret = try_redirect_by_replacing_jump (e, dest, true)))
450310d565efSmrg     {
450410d565efSmrg       df_set_bb_dirty (src);
450510d565efSmrg       return ret;
450610d565efSmrg     }
450710d565efSmrg 
450810d565efSmrg   if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
450910d565efSmrg       && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
451010d565efSmrg     {
451110d565efSmrg       if (dump_file)
451210d565efSmrg 	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
451310d565efSmrg 		 e->src->index, dest->index);
451410d565efSmrg 
451510d565efSmrg       df_set_bb_dirty (e->src);
451610d565efSmrg       redirect_edge_succ (e, dest);
451710d565efSmrg       return e;
451810d565efSmrg     }
451910d565efSmrg 
452010d565efSmrg   /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
452110d565efSmrg      in the case the basic block appears to be in sequence.  Avoid this
452210d565efSmrg      transformation.  */
452310d565efSmrg 
452410d565efSmrg   if (e->flags & EDGE_FALLTHRU)
452510d565efSmrg     {
452610d565efSmrg       /* Redirect any branch edges unified with the fallthru one.  */
452710d565efSmrg       if (JUMP_P (BB_END (src))
452810d565efSmrg 	  && label_is_jump_target_p (BB_HEAD (e->dest),
452910d565efSmrg 				     BB_END (src)))
453010d565efSmrg 	{
453110d565efSmrg 	  edge redirected;
453210d565efSmrg 
453310d565efSmrg 	  if (dump_file)
453410d565efSmrg 	    fprintf (dump_file, "Fallthru edge unified with branch "
453510d565efSmrg 		     "%i->%i redirected to %i\n",
453610d565efSmrg 		     e->src->index, e->dest->index, dest->index);
453710d565efSmrg 	  e->flags &= ~EDGE_FALLTHRU;
453810d565efSmrg 	  redirected = redirect_branch_edge (e, dest);
453910d565efSmrg 	  gcc_assert (redirected);
454010d565efSmrg 	  redirected->flags |= EDGE_FALLTHRU;
454110d565efSmrg 	  df_set_bb_dirty (redirected->src);
454210d565efSmrg 	  return redirected;
454310d565efSmrg 	}
454410d565efSmrg       /* In case we are redirecting fallthru edge to the branch edge
454510d565efSmrg 	 of conditional jump, remove it.  */
454610d565efSmrg       if (EDGE_COUNT (src->succs) == 2)
454710d565efSmrg 	{
454810d565efSmrg 	  /* Find the edge that is different from E.  */
454910d565efSmrg 	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
455010d565efSmrg 
455110d565efSmrg 	  if (s->dest == dest
455210d565efSmrg 	      && any_condjump_p (BB_END (src))
455310d565efSmrg 	      && onlyjump_p (BB_END (src)))
455410d565efSmrg 	    delete_insn (BB_END (src));
455510d565efSmrg 	}
455610d565efSmrg       if (dump_file)
455710d565efSmrg 	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
455810d565efSmrg 		 e->src->index, e->dest->index, dest->index);
455910d565efSmrg       ret = redirect_edge_succ_nodup (e, dest);
456010d565efSmrg     }
456110d565efSmrg   else
456210d565efSmrg     ret = redirect_branch_edge (e, dest);
456310d565efSmrg 
4564c7a68eb7Smrg   if (!ret)
4565c7a68eb7Smrg     return NULL;
4566c7a68eb7Smrg 
4567c7a68eb7Smrg   fixup_partition_crossing (ret);
456810d565efSmrg   /* We don't want simplejumps in the insn stream during cfglayout.  */
4569c7a68eb7Smrg   gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
457010d565efSmrg 
457110d565efSmrg   df_set_bb_dirty (src);
457210d565efSmrg   return ret;
457310d565efSmrg }
457410d565efSmrg 
457510d565efSmrg /* Simple wrapper as we always can redirect fallthru edges.  */
457610d565efSmrg static basic_block
cfg_layout_redirect_edge_and_branch_force(edge e,basic_block dest)457710d565efSmrg cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
457810d565efSmrg {
457910d565efSmrg   edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
458010d565efSmrg 
458110d565efSmrg   gcc_assert (redirected);
458210d565efSmrg   return NULL;
458310d565efSmrg }
458410d565efSmrg 
458510d565efSmrg /* Same as delete_basic_block but update cfg_layout structures.  */
458610d565efSmrg 
static void
cfg_layout_delete_block (basic_block bb)
{
  /* "remaints" (sic) collects the insns that survive around the deleted
     block; they get reattached to the next block's header or to the
     function footer at the end.  */
  rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
  rtx_insn **to;

  /* Splice BB's header chain into the insn stream just before the
     block's first insn.  */
  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
	SET_NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      SET_PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      /* Walk to the last header insn and link it back to NEXT.  */
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = next;
      SET_PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (insn)
	{
	  /* Unlink barriers from the footer; they are re-materialized
	     as needed when going out of cfglayout mode.  */
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      /* Splice whatever remains of the footer after the block's last
	 insn.  */
      if (BB_FOOTER (bb))
	{
	  insn = BB_END (bb);
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  SET_NEXT_INSN (insn) = next;
	  if (next)
	    SET_PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  /* Leftover insns go to the following block's header, or to the
     function footer when BB precedes the exit block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  /* Re-derive the span of surviving insns around the deleted block.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = *to;
      if (*to)
	SET_PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
466810d565efSmrg 
466910d565efSmrg /* Return true when blocks A and B can be safely merged.  */
467010d565efSmrg 
467110d565efSmrg static bool
cfg_layout_can_merge_blocks_p(basic_block a,basic_block b)467210d565efSmrg cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
467310d565efSmrg {
467410d565efSmrg   /* If we are partitioning hot/cold basic blocks, we don't want to
467510d565efSmrg      mess up unconditional or indirect jumps that cross between hot
467610d565efSmrg      and cold sections.
467710d565efSmrg 
467810d565efSmrg      Basic block partitioning may result in some jumps that appear to
467910d565efSmrg      be optimizable (or blocks that appear to be mergeable), but which really
468010d565efSmrg      must be left untouched (they are required to make it safely across
468110d565efSmrg      partition boundaries).  See  the comments at the top of
468210d565efSmrg      bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
468310d565efSmrg 
468410d565efSmrg   if (BB_PARTITION (a) != BB_PARTITION (b))
468510d565efSmrg     return false;
468610d565efSmrg 
468710d565efSmrg   /* Protect the loop latches.  */
468810d565efSmrg   if (current_loops && b->loop_father->latch == b)
468910d565efSmrg     return false;
469010d565efSmrg 
469110d565efSmrg   /* If we would end up moving B's instructions, make sure it doesn't fall
469210d565efSmrg      through into the exit block, since we cannot recover from a fallthrough
469310d565efSmrg      edge into the exit block occurring in the middle of a function.  */
469410d565efSmrg   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
469510d565efSmrg     {
469610d565efSmrg       edge e = find_fallthru_edge (b->succs);
469710d565efSmrg       if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
469810d565efSmrg 	return false;
469910d565efSmrg     }
470010d565efSmrg 
470110d565efSmrg   /* There must be exactly one edge in between the blocks.  */
470210d565efSmrg   return (single_succ_p (a)
470310d565efSmrg 	  && single_succ (a) == b
470410d565efSmrg 	  && single_pred_p (b) == 1
470510d565efSmrg 	  && a != b
470610d565efSmrg 	  /* Must be simple edge.  */
470710d565efSmrg 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
470810d565efSmrg 	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
470910d565efSmrg 	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
471010d565efSmrg 	  /* If the jump insn has side effects, we can't kill the edge.
471110d565efSmrg 	     When not optimizing, try_redirect_by_replacing_jump will
471210d565efSmrg 	     not allow us to redirect an edge by replacing a table jump.  */
471310d565efSmrg 	  && (!JUMP_P (BB_END (a))
471410d565efSmrg 	      || ((!optimize || reload_completed)
471510d565efSmrg 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
471610d565efSmrg }
471710d565efSmrg 
471810d565efSmrg /* Merge block A and B.  The blocks must be mergeable.  */
471910d565efSmrg 
static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
			 a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  rtx_insn *last = BB_FOOTER (a);

	  /* Append B's footer chain after the last insn of A's.  */
	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (b);
	  SET_PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
  if (BB_HEADER (b))
    {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  rtx_insn *last = BB_HEADER (b);

	  /* B's header goes in front of A's existing footer.  */
	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (a);
	  SET_PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  /* B's basic-block note is now redundant; A keeps its own.  */
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* Give B's outgoing edge the locus of the old A->B edge (see the
     forward_edge_locus comment at the top).  */
  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
482510d565efSmrg 
482610d565efSmrg /* Split edge E.  */
482710d565efSmrg 
482810d565efSmrg static basic_block
cfg_layout_split_edge(edge e)482910d565efSmrg cfg_layout_split_edge (edge e)
483010d565efSmrg {
483110d565efSmrg   basic_block new_bb =
483210d565efSmrg     create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
483310d565efSmrg 			? NEXT_INSN (BB_END (e->src)) : get_insns (),
483410d565efSmrg 			NULL_RTX, e->src);
483510d565efSmrg 
483610d565efSmrg   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
483710d565efSmrg     BB_COPY_PARTITION (new_bb, e->src);
483810d565efSmrg   else
483910d565efSmrg     BB_COPY_PARTITION (new_bb, e->dest);
484010d565efSmrg   make_edge (new_bb, e->dest, EDGE_FALLTHRU);
484110d565efSmrg   redirect_edge_and_branch_force (e, new_bb);
484210d565efSmrg 
484310d565efSmrg   return new_bb;
484410d565efSmrg }
484510d565efSmrg 
484610d565efSmrg /* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */
484710d565efSmrg 
static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: no RTL-specific postprocessing is needed after
     a forwarder block is created.  */
}
485210d565efSmrg 
485310d565efSmrg /* Return true if BB contains only labels or non-executable
485410d565efSmrg    instructions.  */
485510d565efSmrg 
485610d565efSmrg static bool
rtl_block_empty_p(basic_block bb)485710d565efSmrg rtl_block_empty_p (basic_block bb)
485810d565efSmrg {
485910d565efSmrg   rtx_insn *insn;
486010d565efSmrg 
486110d565efSmrg   if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
486210d565efSmrg       || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
486310d565efSmrg     return true;
486410d565efSmrg 
486510d565efSmrg   FOR_BB_INSNS (bb, insn)
486610d565efSmrg     if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
486710d565efSmrg       return false;
486810d565efSmrg 
486910d565efSmrg   return true;
487010d565efSmrg }
487110d565efSmrg 
487210d565efSmrg /* Split a basic block if it ends with a conditional branch and if
487310d565efSmrg    the other part of the block is not empty.  */
487410d565efSmrg 
487510d565efSmrg static basic_block
rtl_split_block_before_cond_jump(basic_block bb)487610d565efSmrg rtl_split_block_before_cond_jump (basic_block bb)
487710d565efSmrg {
487810d565efSmrg   rtx_insn *insn;
487910d565efSmrg   rtx_insn *split_point = NULL;
488010d565efSmrg   rtx_insn *last = NULL;
488110d565efSmrg   bool found_code = false;
488210d565efSmrg 
488310d565efSmrg   FOR_BB_INSNS (bb, insn)
488410d565efSmrg     {
488510d565efSmrg       if (any_condjump_p (insn))
488610d565efSmrg 	split_point = last;
488710d565efSmrg       else if (NONDEBUG_INSN_P (insn))
488810d565efSmrg 	found_code = true;
488910d565efSmrg       last = insn;
489010d565efSmrg     }
489110d565efSmrg 
489210d565efSmrg   /* Did not find everything.  */
489310d565efSmrg   if (found_code && split_point)
489410d565efSmrg     return split_block (bb, split_point)->dest;
489510d565efSmrg   else
489610d565efSmrg     return NULL;
489710d565efSmrg }
489810d565efSmrg 
489910d565efSmrg /* Return 1 if BB ends with a call, possibly followed by some
490010d565efSmrg    instructions that must stay with the call, 0 otherwise.  */
490110d565efSmrg 
490210d565efSmrg static bool
rtl_block_ends_with_call_p(basic_block bb)490310d565efSmrg rtl_block_ends_with_call_p (basic_block bb)
490410d565efSmrg {
490510d565efSmrg   rtx_insn *insn = BB_END (bb);
490610d565efSmrg 
490710d565efSmrg   while (!CALL_P (insn)
490810d565efSmrg 	 && insn != BB_HEAD (bb)
490910d565efSmrg 	 && (keep_with_call_p (insn)
491010d565efSmrg 	     || NOTE_P (insn)
491110d565efSmrg 	     || DEBUG_INSN_P (insn)))
491210d565efSmrg     insn = PREV_INSN (insn);
491310d565efSmrg   return (CALL_P (insn));
491410d565efSmrg }
491510d565efSmrg 
491610d565efSmrg /* Return 1 if BB ends with a conditional branch, 0 otherwise.  */
491710d565efSmrg 
491810d565efSmrg static bool
rtl_block_ends_with_condjump_p(const_basic_block bb)491910d565efSmrg rtl_block_ends_with_condjump_p (const_basic_block bb)
492010d565efSmrg {
492110d565efSmrg   return any_condjump_p (BB_END (bb));
492210d565efSmrg }
492310d565efSmrg 
492410d565efSmrg /* Return true if we need to add fake edge to exit.
492510d565efSmrg    Helper function for rtl_flow_call_edges_add.  */
492610d565efSmrg 
492710d565efSmrg static bool
need_fake_edge_p(const rtx_insn * insn)492810d565efSmrg need_fake_edge_p (const rtx_insn *insn)
492910d565efSmrg {
493010d565efSmrg   if (!INSN_P (insn))
493110d565efSmrg     return false;
493210d565efSmrg 
493310d565efSmrg   if ((CALL_P (insn)
493410d565efSmrg        && !SIBLING_CALL_P (insn)
493510d565efSmrg        && !find_reg_note (insn, REG_NORETURN, NULL)
493610d565efSmrg        && !(RTL_CONST_OR_PURE_CALL_P (insn))))
493710d565efSmrg     return true;
493810d565efSmrg 
493910d565efSmrg   return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
494010d565efSmrg 	   && MEM_VOLATILE_P (PATTERN (insn)))
494110d565efSmrg 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
494210d565efSmrg 	      && asm_noperands (insn) != -1
494310d565efSmrg 	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
494410d565efSmrg 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
494510d565efSmrg }
494610d565efSmrg 
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do when only the fixed entry/exit blocks exist.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  /* Decide whether the last real block (the one preceding EXIT) is
     among the blocks to process: always when BLOCKS is null, otherwise
     only if its index is set in BLOCKS.  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Insert a dummy USE on the edge to EXIT so that the call is
	     no longer the last insn of its block once committed.  */
	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      /* Slots of removed blocks are NULL; skip them.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block backwards.  PREV_INSN is captured before any
	 splitting below can rechain INSN.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	      ne->probability = profile_probability::guessed_never ();
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
507210d565efSmrg 
/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target and SECOND_HEAD the fall-thru target;
   the fall-thru needs no handling here because the loop versioning
   code takes care of it.  SECOND_HEAD is kept as a parameter because
   the tree-level counterpart needs it and both must share the same
   signature since this is a hook.  */
507810d565efSmrg static void
rtl_lv_add_condition_to_bb(basic_block first_head,basic_block second_head ATTRIBUTE_UNUSED,basic_block cond_bb,void * comp_rtx)507910d565efSmrg rtl_lv_add_condition_to_bb (basic_block first_head ,
508010d565efSmrg 			    basic_block second_head ATTRIBUTE_UNUSED,
508110d565efSmrg 			    basic_block cond_bb, void *comp_rtx)
508210d565efSmrg {
508310d565efSmrg   rtx_code_label *label;
508410d565efSmrg   rtx_insn *seq, *jump;
508510d565efSmrg   rtx op0 = XEXP ((rtx)comp_rtx, 0);
508610d565efSmrg   rtx op1 = XEXP ((rtx)comp_rtx, 1);
508710d565efSmrg   enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
508810d565efSmrg   machine_mode mode;
508910d565efSmrg 
509010d565efSmrg 
509110d565efSmrg   label = block_label (first_head);
509210d565efSmrg   mode = GET_MODE (op0);
509310d565efSmrg   if (mode == VOIDmode)
509410d565efSmrg     mode = GET_MODE (op1);
509510d565efSmrg 
509610d565efSmrg   start_sequence ();
509710d565efSmrg   op0 = force_operand (op0, NULL_RTX);
509810d565efSmrg   op1 = force_operand (op1, NULL_RTX);
5099c7a68eb7Smrg   do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
5100c7a68eb7Smrg 			   profile_probability::uninitialized ());
510110d565efSmrg   jump = get_last_insn ();
510210d565efSmrg   JUMP_LABEL (jump) = label;
510310d565efSmrg   LABEL_NUSES (label)++;
510410d565efSmrg   seq = get_insns ();
510510d565efSmrg   end_sequence ();
510610d565efSmrg 
510710d565efSmrg   /* Add the new cond, in the new head.  */
510810d565efSmrg   emit_insn_after (seq, BB_END (cond_bb));
510910d565efSmrg }
511010d565efSmrg 
511110d565efSmrg 
/* Given a block B with a conditional branch at its end, store the
   branch edge in *BRANCH_EDGE and the fall-thru edge in
   *FALLTHRU_EDGE.  */
511510d565efSmrg static void
rtl_extract_cond_bb_edges(basic_block b,edge * branch_edge,edge * fallthru_edge)511610d565efSmrg rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
511710d565efSmrg 			   edge *fallthru_edge)
511810d565efSmrg {
511910d565efSmrg   edge e = EDGE_SUCC (b, 0);
512010d565efSmrg 
512110d565efSmrg   if (e->flags & EDGE_FALLTHRU)
512210d565efSmrg     {
512310d565efSmrg       *fallthru_edge = e;
512410d565efSmrg       *branch_edge = EDGE_SUCC (b, 1);
512510d565efSmrg     }
512610d565efSmrg   else
512710d565efSmrg     {
512810d565efSmrg       *branch_edge = e;
512910d565efSmrg       *fallthru_edge = EDGE_SUCC (b, 1);
513010d565efSmrg     }
513110d565efSmrg }
513210d565efSmrg 
513310d565efSmrg void
init_rtl_bb_info(basic_block bb)513410d565efSmrg init_rtl_bb_info (basic_block bb)
513510d565efSmrg {
513610d565efSmrg   gcc_assert (!bb->il.x.rtl);
513710d565efSmrg   bb->il.x.head_ = NULL;
513810d565efSmrg   bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
513910d565efSmrg }
514010d565efSmrg 
514110d565efSmrg /* Returns true if it is possible to remove edge E by redirecting
514210d565efSmrg    it to the destination of the other edge from E->src.  */
514310d565efSmrg 
514410d565efSmrg static bool
rtl_can_remove_branch_p(const_edge e)514510d565efSmrg rtl_can_remove_branch_p (const_edge e)
514610d565efSmrg {
514710d565efSmrg   const_basic_block src = e->src;
514810d565efSmrg   const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
514910d565efSmrg   const rtx_insn *insn = BB_END (src);
515010d565efSmrg   rtx set;
515110d565efSmrg 
515210d565efSmrg   /* The conditions are taken from try_redirect_by_replacing_jump.  */
515310d565efSmrg   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
515410d565efSmrg     return false;
515510d565efSmrg 
515610d565efSmrg   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
515710d565efSmrg     return false;
515810d565efSmrg 
515910d565efSmrg   if (BB_PARTITION (src) != BB_PARTITION (target))
516010d565efSmrg     return false;
516110d565efSmrg 
516210d565efSmrg   if (!onlyjump_p (insn)
516310d565efSmrg       || tablejump_p (insn, NULL, NULL))
516410d565efSmrg     return false;
516510d565efSmrg 
516610d565efSmrg   set = single_set (insn);
516710d565efSmrg   if (!set || side_effects_p (set))
516810d565efSmrg     return false;
516910d565efSmrg 
517010d565efSmrg   return true;
517110d565efSmrg }
517210d565efSmrg 
517310d565efSmrg static basic_block
rtl_duplicate_bb(basic_block bb,copy_bb_data * id)517421ff1670Smrg rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
517510d565efSmrg {
517621ff1670Smrg   bb = cfg_layout_duplicate_bb (bb, id);
517710d565efSmrg   bb->aux = NULL;
517810d565efSmrg   return bb;
517910d565efSmrg }
518010d565efSmrg 
518110d565efSmrg /* Do book-keeping of basic block BB for the profile consistency checker.
51820fc04c29Smrg    Store the counting in RECORD.  */
518310d565efSmrg static void
rtl_account_profile_record(basic_block bb,struct profile_record * record)51840fc04c29Smrg rtl_account_profile_record (basic_block bb, struct profile_record *record)
518510d565efSmrg {
518610d565efSmrg   rtx_insn *insn;
518710d565efSmrg   FOR_BB_INSNS (bb, insn)
518810d565efSmrg     if (INSN_P (insn))
518910d565efSmrg       {
51900fc04c29Smrg 	record->size += insn_cost (insn, false);
5191c7a68eb7Smrg 	if (bb->count.initialized_p ())
51920fc04c29Smrg 	  record->time
5193c7a68eb7Smrg 	    += insn_cost (insn, true) * bb->count.to_gcov_type ();
519410d565efSmrg 	else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
51950fc04c29Smrg 	  record->time
5196c7a68eb7Smrg 	    += insn_cost (insn, true) * bb->count.to_frequency (cfun);
519710d565efSmrg       }
519810d565efSmrg }
519910d565efSmrg 
/* Implementation of CFG manipulation for linearized RTL.
   NOTE(review): initializers are positional — the order must match the
   field order of struct cfg_hooks in cfghooks.h; confirm against that
   header before reordering.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  rtl_duplicate_bb,
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
523710d565efSmrg 
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.
   NOTE(review): initializers are positional and must stay in the field
   order of struct cfg_hooks in cfghooks.h (same layout as rtl_cfg_hooks
   above).  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
527910d565efSmrg 
528010d565efSmrg #include "gt-cfgrtl.h"
5281*ec02198aSmrg 
5282*ec02198aSmrg #if __GNUC__ >= 10
5283*ec02198aSmrg #  pragma GCC diagnostic pop
5284*ec02198aSmrg #endif
5285