xref: /dragonfly/contrib/gcc-8.0/gcc/cfgrtl.c (revision 7bcb6caf)
1 /* Control flow graph manipulation code for GNU compiler.
2    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains low level functions to manipulate the CFG and analyze it
21    that are aware of the RTL intermediate language.
22 
23    Available functionality:
24      - Basic CFG/RTL manipulation API documented in cfghooks.h
25      - CFG-aware instruction chain manipulation
26 	 delete_insn, delete_insn_chain
27      - Edge splitting and committing to edges
28 	 insert_insn_on_edge, commit_edge_insertions
29      - CFG updating after insn simplification
30 	 purge_dead_edges, purge_all_dead_edges
31      - CFG fixing after coarse manipulation
32 	fixup_abnormal_edges
33 
34    Functions not supposed for generic use:
35      - Infrastructure to determine quickly basic block for insn
36 	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37      - Edge redirection with updating and optimizing of insn chain
38 	 block_label, tidy_fallthru_edge, force_nonfallthru  */
39 
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "backend.h"
44 #include "target.h"
45 #include "rtl.h"
46 #include "tree.h"
47 #include "cfghooks.h"
48 #include "df.h"
49 #include "insn-config.h"
50 #include "memmodel.h"
51 #include "emit-rtl.h"
52 #include "cfgrtl.h"
53 #include "cfganal.h"
54 #include "cfgbuild.h"
55 #include "cfgcleanup.h"
56 #include "bb-reorder.h"
57 #include "rtl-error.h"
58 #include "insn-attr.h"
59 #include "dojump.h"
60 #include "expr.h"
61 #include "cfgloop.h"
62 #include "tree-pass.h"
63 #include "print-rtl.h"
64 
65 /* Holds the interesting leading and trailing notes for the function.
66    Only applicable if the CFG is in cfglayout mode.  */
67 static GTY(()) rtx_insn *cfg_layout_function_footer;
68 static GTY(()) rtx_insn *cfg_layout_function_header;
69 
70 static rtx_insn *skip_insns_after_block (basic_block);
71 static void record_effective_endpoints (void);
72 static void fixup_reorder_chain (void);
73 
74 void verify_insn_chain (void);
75 static void fixup_fallthru_exit_predecessor (void);
76 static int can_delete_note_p (const rtx_note *);
77 static int can_delete_label_p (const rtx_code_label *);
78 static basic_block rtl_split_edge (edge);
79 static bool rtl_move_block_after (basic_block, basic_block);
80 static int rtl_verify_flow_info (void);
81 static basic_block cfg_layout_split_block (basic_block, void *);
82 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
83 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
84 static void cfg_layout_delete_block (basic_block);
85 static void rtl_delete_block (basic_block);
86 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
87 static edge rtl_redirect_edge_and_branch (edge, basic_block);
88 static basic_block rtl_split_block (basic_block, void *);
89 static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
90 static int rtl_verify_flow_info_1 (void);
91 static void rtl_make_forwarder_block (edge);
92 
93 /* Return true if NOTE is not one of the ones that must be kept paired,
94    so that we may simply delete it.  */
95 
96 static int
97 can_delete_note_p (const rtx_note *note)
98 {
99   switch (NOTE_KIND (note))
100     {
101     case NOTE_INSN_DELETED:
102     case NOTE_INSN_BASIC_BLOCK:
103     case NOTE_INSN_EPILOGUE_BEG:
104       return true;
105 
106     default:
107       return false;
108     }
109 }
110 
111 /* True if a given label can be deleted.  */
112 
113 static int
114 can_delete_label_p (const rtx_code_label *label)
115 {
116   return (!LABEL_PRESERVE_P (label)
117 	  /* User declared labels must be preserved.  */
118 	  && LABEL_NAME (label) == 0
119 	  && !vec_safe_contains<rtx_insn *> (forced_labels,
120 					     const_cast<rtx_code_label *> (label)));
121 }
122 
123 /* Delete INSN by patching it out.  */
124 
125 void
126 delete_insn (rtx_insn *insn)
127 {
128   rtx note;
129   bool really_delete = true;
130 
131   if (LABEL_P (insn))
132     {
133       /* Some labels can't be directly removed from the INSN chain, as they
134 	 might be references via variables, constant pool etc.
135 	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
136       if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
137 	{
138 	  const char *name = LABEL_NAME (insn);
139 	  basic_block bb = BLOCK_FOR_INSN (insn);
140 	  rtx_insn *bb_note = NEXT_INSN (insn);
141 
142 	  really_delete = false;
143 	  PUT_CODE (insn, NOTE);
144 	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
145 	  NOTE_DELETED_LABEL_NAME (insn) = name;
146 
147 	  /* If the note following the label starts a basic block, and the
148 	     label is a member of the same basic block, interchange the two.  */
149 	  if (bb_note != NULL_RTX
150 	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
151 	      && bb != NULL
152 	      && bb == BLOCK_FOR_INSN (bb_note))
153 	    {
154 	      reorder_insns_nobb (insn, insn, bb_note);
155 	      BB_HEAD (bb) = bb_note;
156 	      if (BB_END (bb) == bb_note)
157 		BB_END (bb) = insn;
158 	    }
159 	}
160 
161       remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
162     }
163 
164   if (really_delete)
165     {
166       /* If this insn has already been deleted, something is very wrong.  */
167       gcc_assert (!insn->deleted ());
168       if (INSN_P (insn))
169 	df_insn_delete (insn);
170       remove_insn (insn);
171       insn->set_deleted ();
172     }
173 
174   /* If deleting a jump, decrement the use count of the label.  Deleting
175      the label itself should happen in the normal course of block merging.  */
176   if (JUMP_P (insn))
177     {
178       if (JUMP_LABEL (insn)
179 	  && LABEL_P (JUMP_LABEL (insn)))
180 	LABEL_NUSES (JUMP_LABEL (insn))--;
181 
182       /* If there are more targets, remove them too.  */
183       while ((note
184 	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
185 	     && LABEL_P (XEXP (note, 0)))
186 	{
187 	  LABEL_NUSES (XEXP (note, 0))--;
188 	  remove_note (insn, note);
189 	}
190     }
191 
192   /* Also if deleting any insn that references a label as an operand.  */
193   while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
194 	 && LABEL_P (XEXP (note, 0)))
195     {
196       LABEL_NUSES (XEXP (note, 0))--;
197       remove_note (insn, note);
198     }
199 
200   if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
201     {
202       rtvec vec = table->get_labels ();
203       int len = GET_NUM_ELEM (vec);
204       int i;
205 
206       for (i = 0; i < len; i++)
207 	{
208 	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);
209 
210 	  /* When deleting code in bulk (e.g. removing many unreachable
211 	     blocks) we can delete a label that's a target of the vector
212 	     before deleting the vector itself.  */
213 	  if (!NOTE_P (label))
214 	    LABEL_NUSES (label)--;
215 	}
216     }
217 }
218 
219 /* Like delete_insn but also purge dead edges from BB.
220    Return true if any edges are eliminated.  */
221 
222 bool
223 delete_insn_and_edges (rtx_insn *insn)
224 {
225   bool purge = false;
226 
227   if (INSN_P (insn)
228       && BLOCK_FOR_INSN (insn)
229       && BB_END (BLOCK_FOR_INSN (insn)) == insn)
230     purge = true;
231   delete_insn (insn);
232   if (purge)
233     return purge_dead_edges (BLOCK_FOR_INSN (insn));
234   return false;
235 }
236 
237 /* Unlink a chain of insns between START and FINISH, leaving notes
238    that must be paired.  If CLEAR_BB is true, we set bb field for
239    insns that cannot be removed to NULL.  */
240 
241 void
242 delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
243 {
244   /* Unchain the insns one by one.  It would be quicker to delete all of these
245      with a single unchaining, rather than one at a time, but we need to keep
246      the NOTE's.  */
247   rtx_insn *current = finish;
248   while (1)
249     {
250       rtx_insn *prev = PREV_INSN (current);
251       if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
252 	;
253       else
254 	delete_insn (current);
255 
256       if (clear_bb && !current->deleted ())
257 	set_block_for_insn (current, NULL);
258 
259       if (current == start)
260 	break;
261       current = prev;
262     }
263 }
264 
265 /* Create a new basic block consisting of the instructions between HEAD and END
266    inclusive.  This function is designed to allow fast BB construction - reuses
267    the note and basic block struct in BB_NOTE, if any and do not grow
268    BASIC_BLOCK chain and should be used directly only by CFG construction code.
269    END can be NULL in to create new empty basic block before HEAD.  Both END
270    and HEAD can be NULL to create basic block at the end of INSN chain.
271    AFTER is the basic block we should be put after.  */
272 
273 basic_block
274 create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
275 			      basic_block after)
276 {
277   basic_block bb;
278 
279   if (bb_note
280       && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
281       && bb->aux == NULL)
282     {
283       /* If we found an existing note, thread it back onto the chain.  */
284 
285       rtx_insn *after;
286 
287       if (LABEL_P (head))
288 	after = head;
289       else
290 	{
291 	  after = PREV_INSN (head);
292 	  head = bb_note;
293 	}
294 
295       if (after != bb_note && NEXT_INSN (after) != bb_note)
296 	reorder_insns_nobb (bb_note, bb_note, after);
297     }
298   else
299     {
300       /* Otherwise we must create a note and a basic block structure.  */
301 
302       bb = alloc_block ();
303 
304       init_rtl_bb_info (bb);
305       if (!head && !end)
306 	head = end = bb_note
307 	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
308       else if (LABEL_P (head) && end)
309 	{
310 	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
311 	  if (head == end)
312 	    end = bb_note;
313 	}
314       else
315 	{
316 	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
317 	  head = bb_note;
318 	  if (!end)
319 	    end = head;
320 	}
321 
322       NOTE_BASIC_BLOCK (bb_note) = bb;
323     }
324 
325   /* Always include the bb note in the block.  */
326   if (NEXT_INSN (end) == bb_note)
327     end = bb_note;
328 
329   BB_HEAD (bb) = head;
330   BB_END (bb) = end;
331   bb->index = last_basic_block_for_fn (cfun)++;
332   bb->flags = BB_NEW | BB_RTL;
333   link_block (bb, after);
334   SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
335   df_bb_refs_record (bb->index, false);
336   update_bb_for_insn (bb);
337   BB_SET_PARTITION (bb, BB_UNPARTITIONED);
338 
339   /* Tag the block so that we know it has been used when considering
340      other basic block notes.  */
341   bb->aux = bb;
342 
343   return bb;
344 }
345 
346 /* Create new basic block consisting of instructions in between HEAD and END
347    and place it to the BB chain after block AFTER.  END can be NULL to
348    create a new empty basic block before HEAD.  Both END and HEAD can be
349    NULL to create basic block at the end of INSN chain.  */
350 
351 static basic_block
352 rtl_create_basic_block (void *headp, void *endp, basic_block after)
353 {
354   rtx_insn *head = (rtx_insn *) headp;
355   rtx_insn *end = (rtx_insn *) endp;
356   basic_block bb;
357 
358   /* Grow the basic block array if needed.  */
359   if ((size_t) last_basic_block_for_fn (cfun)
360       >= basic_block_info_for_fn (cfun)->length ())
361     {
362       size_t new_size =
363 	(last_basic_block_for_fn (cfun)
364 	 + (last_basic_block_for_fn (cfun) + 3) / 4);
365       vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
366     }
367 
368   n_basic_blocks_for_fn (cfun)++;
369 
370   bb = create_basic_block_structure (head, end, NULL, after);
371   bb->aux = NULL;
372   return bb;
373 }
374 
375 static basic_block
376 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
377 {
378   basic_block newbb = rtl_create_basic_block (head, end, after);
379 
380   return newbb;
381 }
382 
383 /* Delete the insns in a (non-live) block.  We physically delete every
384    non-deleted-note insn, and update the flow graph appropriately.
385 
386    Return nonzero if we deleted an exception handler.  */
387 
388 /* ??? Preserving all such notes strikes me as wrong.  It would be nice
389    to post-process the stream to remove empty blocks, loops, ranges, etc.  */
390 
391 static void
392 rtl_delete_block (basic_block b)
393 {
394   rtx_insn *insn, *end;
395 
396   /* If the head of this block is a CODE_LABEL, then it might be the
397      label for an exception handler which can't be reached.  We need
398      to remove the label from the exception_handler_label list.  */
399   insn = BB_HEAD (b);
400 
401   end = get_last_bb_insn (b);
402 
403   /* Selectively delete the entire chain.  */
404   BB_HEAD (b) = NULL;
405   delete_insn_chain (insn, end, true);
406 
407 
408   if (dump_file)
409     fprintf (dump_file, "deleting block %d\n", b->index);
410   df_bb_delete (b->index);
411 }
412 
413 /* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */
414 
415 void
416 compute_bb_for_insn (void)
417 {
418   basic_block bb;
419 
420   FOR_EACH_BB_FN (bb, cfun)
421     {
422       rtx_insn *end = BB_END (bb);
423       rtx_insn *insn;
424 
425       for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
426 	{
427 	  BLOCK_FOR_INSN (insn) = bb;
428 	  if (insn == end)
429 	    break;
430 	}
431     }
432 }
433 
434 /* Release the basic_block_for_insn array.  */
435 
436 unsigned int
437 free_bb_for_insn (void)
438 {
439   rtx_insn *insn;
440   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
441     if (!BARRIER_P (insn))
442       BLOCK_FOR_INSN (insn) = NULL;
443   return 0;
444 }
445 
446 namespace {
447 
448 const pass_data pass_data_free_cfg =
449 {
450   RTL_PASS, /* type */
451   "*free_cfg", /* name */
452   OPTGROUP_NONE, /* optinfo_flags */
453   TV_NONE, /* tv_id */
454   0, /* properties_required */
455   0, /* properties_provided */
456   PROP_cfg, /* properties_destroyed */
457   0, /* todo_flags_start */
458   0, /* todo_flags_finish */
459 };
460 
461 class pass_free_cfg : public rtl_opt_pass
462 {
463 public:
464   pass_free_cfg (gcc::context *ctxt)
465     : rtl_opt_pass (pass_data_free_cfg, ctxt)
466   {}
467 
468   /* opt_pass methods: */
469   virtual unsigned int execute (function *);
470 
471 }; // class pass_free_cfg
472 
473 unsigned int
474 pass_free_cfg::execute (function *)
475 {
476   /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
477      valid at that point so it would be too late to call df_analyze.  */
478   if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
479     {
480       df_note_add_problem ();
481       df_analyze ();
482     }
483 
484   if (crtl->has_bb_partition)
485     insert_section_boundary_note ();
486 
487   free_bb_for_insn ();
488   return 0;
489 }
490 
491 } // anon namespace
492 
493 rtl_opt_pass *
494 make_pass_free_cfg (gcc::context *ctxt)
495 {
496   return new pass_free_cfg (ctxt);
497 }
498 
499 /* Return RTX to emit after when we want to emit code on the entry of function.  */
500 rtx_insn *
501 entry_of_function (void)
502 {
503   return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
504 	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
505 }
506 
507 /* Emit INSN at the entry point of the function, ensuring that it is only
508    executed once per function.  */
509 void
510 emit_insn_at_entry (rtx insn)
511 {
512   edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
513   edge e = ei_safe_edge (ei);
514   gcc_assert (e->flags & EDGE_FALLTHRU);
515 
516   insert_insn_on_edge (insn, e);
517   commit_edge_insertions ();
518 }
519 
520 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
521    (or BARRIER if found) and notify df of the bb change.
522    The insn chain range is inclusive
523    (i.e. both BEGIN and END will be updated. */
524 
525 static void
526 update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
527 {
528   rtx_insn *insn;
529 
530   end = NEXT_INSN (end);
531   for (insn = begin; insn != end; insn = NEXT_INSN (insn))
532     if (!BARRIER_P (insn))
533       df_insn_change_bb (insn, bb);
534 }
535 
536 /* Update BLOCK_FOR_INSN of insns in BB to BB,
537    and notify df of the change.  */
538 
539 void
540 update_bb_for_insn (basic_block bb)
541 {
542   update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
543 }
544 
545 
546 /* Like active_insn_p, except keep the return value clobber around
547    even after reload.  */
548 
549 static bool
550 flow_active_insn_p (const rtx_insn *insn)
551 {
552   if (active_insn_p (insn))
553     return true;
554 
555   /* A clobber of the function return value exists for buggy
556      programs that fail to return a value.  Its effect is to
557      keep the return value from being live across the entire
558      function.  If we allow it to be skipped, we introduce the
559      possibility for register lifetime confusion.  */
560   if (GET_CODE (PATTERN (insn)) == CLOBBER
561       && REG_P (XEXP (PATTERN (insn), 0))
562       && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
563     return true;
564 
565   return false;
566 }
567 
568 /* Return true if the block has no effect and only forwards control flow to
569    its single destination.  */
570 
571 bool
572 contains_no_active_insn_p (const_basic_block bb)
573 {
574   rtx_insn *insn;
575 
576   if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
577       || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
578       || !single_succ_p (bb)
579       || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
580     return false;
581 
582   for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
583     if (INSN_P (insn) && flow_active_insn_p (insn))
584       return false;
585 
586   return (!INSN_P (insn)
587 	  || (JUMP_P (insn) && simplejump_p (insn))
588 	  || !flow_active_insn_p (insn));
589 }
590 
591 /* Likewise, but protect loop latches, headers and preheaders.  */
592 /* FIXME: Make this a cfg hook.  */
593 
594 bool
595 forwarder_block_p (const_basic_block bb)
596 {
597   if (!contains_no_active_insn_p (bb))
598     return false;
599 
600   /* Protect loop latches, headers and preheaders.  */
601   if (current_loops)
602     {
603       basic_block dest;
604       if (bb->loop_father->header == bb)
605 	return false;
606       dest = EDGE_SUCC (bb, 0)->dest;
607       if (dest->loop_father->header == dest)
608 	return false;
609     }
610 
611   return true;
612 }
613 
614 /* Return nonzero if we can reach target from src by falling through.  */
615 /* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */
616 
617 bool
618 can_fallthru (basic_block src, basic_block target)
619 {
620   rtx_insn *insn = BB_END (src);
621   rtx_insn *insn2;
622   edge e;
623   edge_iterator ei;
624 
625   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
626     return true;
627   if (src->next_bb != target)
628     return false;
629 
630   /* ??? Later we may add code to move jump tables offline.  */
631   if (tablejump_p (insn, NULL, NULL))
632     return false;
633 
634   FOR_EACH_EDGE (e, ei, src->succs)
635     if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
636 	&& e->flags & EDGE_FALLTHRU)
637       return false;
638 
639   insn2 = BB_HEAD (target);
640   if (!active_insn_p (insn2))
641     insn2 = next_active_insn (insn2);
642 
643   return next_active_insn (insn) == insn2;
644 }
645 
646 /* Return nonzero if we could reach target from src by falling through,
647    if the target was made adjacent.  If we already have a fall-through
648    edge to the exit block, we can't do that.  */
649 static bool
650 could_fall_through (basic_block src, basic_block target)
651 {
652   edge e;
653   edge_iterator ei;
654 
655   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
656     return true;
657   FOR_EACH_EDGE (e, ei, src->succs)
658     if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
659 	&& e->flags & EDGE_FALLTHRU)
660       return 0;
661   return true;
662 }
663 
664 /* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
665 rtx_note *
666 bb_note (basic_block bb)
667 {
668   rtx_insn *note;
669 
670   note = BB_HEAD (bb);
671   if (LABEL_P (note))
672     note = NEXT_INSN (note);
673 
674   gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
675   return as_a <rtx_note *> (note);
676 }
677 
678 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
679    note associated with the BLOCK.  */
680 
681 static rtx_insn *
682 first_insn_after_basic_block_note (basic_block block)
683 {
684   rtx_insn *insn;
685 
686   /* Get the first instruction in the block.  */
687   insn = BB_HEAD (block);
688 
689   if (insn == NULL_RTX)
690     return NULL;
691   if (LABEL_P (insn))
692     insn = NEXT_INSN (insn);
693   gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
694 
695   return NEXT_INSN (insn);
696 }
697 
698 /* Creates a new basic block just after basic block BB by splitting
699    everything after specified instruction INSNP.  */
700 
701 static basic_block
702 rtl_split_block (basic_block bb, void *insnp)
703 {
704   basic_block new_bb;
705   rtx_insn *insn = (rtx_insn *) insnp;
706   edge e;
707   edge_iterator ei;
708 
709   if (!insn)
710     {
711       insn = first_insn_after_basic_block_note (bb);
712 
713       if (insn)
714 	{
715 	  rtx_insn *next = insn;
716 
717 	  insn = PREV_INSN (insn);
718 
719 	  /* If the block contains only debug insns, insn would have
720 	     been NULL in a non-debug compilation, and then we'd end
721 	     up emitting a DELETED note.  For -fcompare-debug
722 	     stability, emit the note too.  */
723 	  if (insn != BB_END (bb)
724 	      && DEBUG_INSN_P (next)
725 	      && DEBUG_INSN_P (BB_END (bb)))
726 	    {
727 	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
728 		next = NEXT_INSN (next);
729 
730 	      if (next == BB_END (bb))
731 		emit_note_after (NOTE_INSN_DELETED, next);
732 	    }
733 	}
734       else
735 	insn = get_last_insn ();
736     }
737 
738   /* We probably should check type of the insn so that we do not create
739      inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
740      bother.  */
741   if (insn == BB_END (bb))
742     emit_note_after (NOTE_INSN_DELETED, insn);
743 
744   /* Create the new basic block.  */
745   new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
746   BB_COPY_PARTITION (new_bb, bb);
747   BB_END (bb) = insn;
748 
749   /* Redirect the outgoing edges.  */
750   new_bb->succs = bb->succs;
751   bb->succs = NULL;
752   FOR_EACH_EDGE (e, ei, new_bb->succs)
753     e->src = new_bb;
754 
755   /* The new block starts off being dirty.  */
756   df_set_bb_dirty (bb);
757   return new_bb;
758 }
759 
760 /* Return true if the single edge between blocks A and B is the only place
761    in RTL which holds some unique locus.  */
762 
763 static bool
764 unique_locus_on_edge_between_p (basic_block a, basic_block b)
765 {
766   const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
767   rtx_insn *insn, *end;
768 
769   if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
770     return false;
771 
772   /* First scan block A backward.  */
773   insn = BB_END (a);
774   end = PREV_INSN (BB_HEAD (a));
775   while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
776     insn = PREV_INSN (insn);
777 
778   if (insn != end && INSN_LOCATION (insn) == goto_locus)
779     return false;
780 
781   /* Then scan block B forward.  */
782   insn = BB_HEAD (b);
783   if (insn)
784     {
785       end = NEXT_INSN (BB_END (b));
786       while (insn != end && !NONDEBUG_INSN_P (insn))
787 	insn = NEXT_INSN (insn);
788 
789       if (insn != end && INSN_HAS_LOCATION (insn)
790 	  && INSN_LOCATION (insn) == goto_locus)
791 	return false;
792     }
793 
794   return true;
795 }
796 
797 /* If the single edge between blocks A and B is the only place in RTL which
798    holds some unique locus, emit a nop with that locus between the blocks.  */
799 
800 static void
801 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
802 {
803   if (!unique_locus_on_edge_between_p (a, b))
804     return;
805 
806   BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
807   INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
808 }
809 
810 /* Blocks A and B are to be merged into a single block A.  The insns
811    are already contiguous.  */
812 
813 static void
814 rtl_merge_blocks (basic_block a, basic_block b)
815 {
816   rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
817   rtx_insn *del_first = NULL, *del_last = NULL;
818   rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
819   bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
820   int b_empty = 0;
821 
822   if (dump_file)
823     fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
824 	     a->index);
825 
826   while (DEBUG_INSN_P (b_end))
827     b_end = PREV_INSN (b_debug_start = b_end);
828 
829   /* If there was a CODE_LABEL beginning B, delete it.  */
830   if (LABEL_P (b_head))
831     {
832       /* Detect basic blocks with nothing but a label.  This can happen
833 	 in particular at the end of a function.  */
834       if (b_head == b_end)
835 	b_empty = 1;
836 
837       del_first = del_last = b_head;
838       b_head = NEXT_INSN (b_head);
839     }
840 
841   /* Delete the basic block note and handle blocks containing just that
842      note.  */
843   if (NOTE_INSN_BASIC_BLOCK_P (b_head))
844     {
845       if (b_head == b_end)
846 	b_empty = 1;
847       if (! del_last)
848 	del_first = b_head;
849 
850       del_last = b_head;
851       b_head = NEXT_INSN (b_head);
852     }
853 
854   /* If there was a jump out of A, delete it.  */
855   if (JUMP_P (a_end))
856     {
857       rtx_insn *prev;
858 
859       for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
860 	if (!NOTE_P (prev)
861 	    || NOTE_INSN_BASIC_BLOCK_P (prev)
862 	    || prev == BB_HEAD (a))
863 	  break;
864 
865       del_first = a_end;
866 
867       /* If this was a conditional jump, we need to also delete
868 	 the insn that set cc0.  */
869       if (HAVE_cc0 && only_sets_cc0_p (prev))
870 	{
871 	  rtx_insn *tmp = prev;
872 
873 	  prev = prev_nonnote_insn (prev);
874 	  if (!prev)
875 	    prev = BB_HEAD (a);
876 	  del_first = tmp;
877 	}
878 
879       a_end = PREV_INSN (del_first);
880     }
881   else if (BARRIER_P (NEXT_INSN (a_end)))
882     del_first = NEXT_INSN (a_end);
883 
884   /* Delete everything marked above as well as crap that might be
885      hanging out between the two blocks.  */
886   BB_END (a) = a_end;
887   BB_HEAD (b) = b_empty ? NULL : b_head;
888   delete_insn_chain (del_first, del_last, true);
889 
890   /* When not optimizing and the edge is the only place in RTL which holds
891      some unique locus, emit a nop with that locus in between.  */
892   if (!optimize)
893     {
894       emit_nop_for_unique_locus_between (a, b);
895       a_end = BB_END (a);
896     }
897 
898   /* Reassociate the insns of B with A.  */
899   if (!b_empty)
900     {
901       update_bb_for_insn_chain (a_end, b_debug_end, a);
902 
903       BB_END (a) = b_debug_end;
904       BB_HEAD (b) = NULL;
905     }
906   else if (b_end != b_debug_end)
907     {
908       /* Move any deleted labels and other notes between the end of A
909 	 and the debug insns that make up B after the debug insns,
910 	 bringing the debug insns into A while keeping the notes after
911 	 the end of A.  */
912       if (NEXT_INSN (a_end) != b_debug_start)
913 	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
914 			    b_debug_end);
915       update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
916       BB_END (a) = b_debug_end;
917     }
918 
919   df_bb_delete (b->index);
920 
921   /* If B was a forwarder block, propagate the locus on the edge.  */
922   if (forwarder_p
923       && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
924     EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
925 
926   if (dump_file)
927     fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
928 }
929 
930 
931 /* Return true when block A and B can be merged.  */
932 
933 static bool
934 rtl_can_merge_blocks (basic_block a, basic_block b)
935 {
936   /* If we are partitioning hot/cold basic blocks, we don't want to
937      mess up unconditional or indirect jumps that cross between hot
938      and cold sections.
939 
940      Basic block partitioning may result in some jumps that appear to
941      be optimizable (or blocks that appear to be mergeable), but which really
942      must be left untouched (they are required to make it safely across
943      partition boundaries).  See  the comments at the top of
944      bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
945 
946   if (BB_PARTITION (a) != BB_PARTITION (b))
947     return false;
948 
949   /* Protect the loop latches.  */
950   if (current_loops && b->loop_father->latch == b)
951     return false;
952 
953   /* There must be exactly one edge in between the blocks.  */
954   return (single_succ_p (a)
955 	  && single_succ (a) == b
956 	  && single_pred_p (b)
957 	  && a != b
958 	  /* Must be simple edge.  */
959 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
960 	  && a->next_bb == b
961 	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
962 	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
963 	  /* If the jump insn has side effects,
964 	     we can't kill the edge.  */
965 	  && (!JUMP_P (BB_END (a))
966 	      || (reload_completed
967 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
968 }
969 
970 /* Return the label in the head of basic block BLOCK.  Create one if it doesn't
971    exist.  */
972 
973 rtx_code_label *
974 block_label (basic_block block)
975 {
976   if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
977     return NULL;
978 
979   if (!LABEL_P (BB_HEAD (block)))
980     {
981       BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
982     }
983 
984   return as_a <rtx_code_label *> (BB_HEAD (block));
985 }
986 
987 /* Attempt to perform edge redirection by replacing possibly complex jump
988    instruction by unconditional jump or removing jump completely.  This can
989    apply only if all edges now point to the same block.  The parameters and
990    return values are equivalent to redirect_edge_and_branch.  */
991 
992 edge
993 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
994 {
995   basic_block src = e->src;
996   rtx_insn *insn = BB_END (src), *kill_from;
997   rtx set;
998   int fallthru = 0;
999 
1000   /* If we are partitioning hot/cold basic blocks, we don't want to
1001      mess up unconditional or indirect jumps that cross between hot
1002      and cold sections.
1003 
1004      Basic block partitioning may result in some jumps that appear to
1005      be optimizable (or blocks that appear to be mergeable), but which really
1006      must be left untouched (they are required to make it safely across
1007      partition boundaries).  See  the comments at the top of
1008      bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
1009 
1010   if (BB_PARTITION (src) != BB_PARTITION (target))
1011     return NULL;
1012 
1013   /* We can replace or remove a complex jump only when we have exactly
1014      two edges.  Also, if we have exactly one outgoing edge, we can
1015      redirect that.  */
1016   if (EDGE_COUNT (src->succs) >= 3
1017       /* Verify that all targets will be TARGET.  Specifically, the
1018 	 edge that is not E must also go to TARGET.  */
1019       || (EDGE_COUNT (src->succs) == 2
1020 	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
1021     return NULL;
1022 
1023   if (!onlyjump_p (insn))
1024     return NULL;
1025   if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
1026     return NULL;
1027 
1028   /* Avoid removing branch with side effects.  */
1029   set = single_set (insn);
1030   if (!set || side_effects_p (set))
1031     return NULL;
1032 
1033   /* In case we zap a conditional jump, we'll need to kill
1034      the cc0 setter too.  */
1035   kill_from = insn;
1036   if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
1037       && only_sets_cc0_p (PREV_INSN (insn)))
1038     kill_from = PREV_INSN (insn);
1039 
1040   /* See if we can create the fallthru edge.  */
1041   if (in_cfglayout || can_fallthru (src, target))
1042     {
1043       if (dump_file)
1044 	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1045       fallthru = 1;
1046 
1047       /* Selectively unlink whole insn chain.  */
1048       if (in_cfglayout)
1049 	{
1050 	  rtx_insn *insn = BB_FOOTER (src);
1051 
1052 	  delete_insn_chain (kill_from, BB_END (src), false);
1053 
1054 	  /* Remove barriers but keep jumptables.  */
1055 	  while (insn)
1056 	    {
1057 	      if (BARRIER_P (insn))
1058 		{
1059 		  if (PREV_INSN (insn))
1060 		    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1061 		  else
1062 		    BB_FOOTER (src) = NEXT_INSN (insn);
1063 		  if (NEXT_INSN (insn))
1064 		    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1065 		}
1066 	      if (LABEL_P (insn))
1067 		break;
1068 	      insn = NEXT_INSN (insn);
1069 	    }
1070 	}
1071       else
1072 	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1073 			   false);
1074     }
1075 
1076   /* If this already is simplejump, redirect it.  */
1077   else if (simplejump_p (insn))
1078     {
1079       if (e->dest == target)
1080 	return NULL;
1081       if (dump_file)
1082 	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1083 		 INSN_UID (insn), e->dest->index, target->index);
1084       if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1085 			  block_label (target), 0))
1086 	{
1087 	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
1088 	  return NULL;
1089 	}
1090     }
1091 
1092   /* Cannot do anything for target exit block.  */
1093   else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1094     return NULL;
1095 
1096   /* Or replace possibly complicated jump insn by simple jump insn.  */
1097   else
1098     {
1099       rtx_code_label *target_label = block_label (target);
1100       rtx_insn *barrier;
1101       rtx_insn *label;
1102       rtx_jump_table_data *table;
1103 
1104       emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
1105       JUMP_LABEL (BB_END (src)) = target_label;
1106       LABEL_NUSES (target_label)++;
1107       if (dump_file)
1108 	fprintf (dump_file, "Replacing insn %i by jump %i\n",
1109 		 INSN_UID (insn), INSN_UID (BB_END (src)));
1110 
1111 
1112       delete_insn_chain (kill_from, insn, false);
1113 
1114       /* Recognize a tablejump that we are converting to a
1115 	 simple jump and remove its associated CODE_LABEL
1116 	 and ADDR_VEC or ADDR_DIFF_VEC.  */
1117       if (tablejump_p (insn, &label, &table))
1118 	delete_insn_chain (label, table, false);
1119 
1120       barrier = next_nonnote_nondebug_insn (BB_END (src));
1121       if (!barrier || !BARRIER_P (barrier))
1122 	emit_barrier_after (BB_END (src));
1123       else
1124 	{
1125 	  if (barrier != NEXT_INSN (BB_END (src)))
1126 	    {
1127 	      /* Move the jump before barrier so that the notes
1128 		 which originally were or were created before jump table are
1129 		 inside the basic block.  */
1130 	      rtx_insn *new_insn = BB_END (src);
1131 
1132 	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1133 				        PREV_INSN (barrier), src);
1134 
1135 	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1136 	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1137 
1138 	      SET_NEXT_INSN (new_insn) = barrier;
1139 	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1140 
1141 	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
1142 	      SET_PREV_INSN (barrier) = new_insn;
1143 	    }
1144 	}
1145     }
1146 
1147   /* Keep only one edge out and set proper flags.  */
1148   if (!single_succ_p (src))
1149     remove_edge (e);
1150   gcc_assert (single_succ_p (src));
1151 
1152   e = single_succ_edge (src);
1153   if (fallthru)
1154     e->flags = EDGE_FALLTHRU;
1155   else
1156     e->flags = 0;
1157 
1158   e->probability = profile_probability::always ();
1159 
1160   if (e->dest != target)
1161     redirect_edge_succ (e, target);
1162   return e;
1163 }
1164 
1165 /* Subroutine of redirect_branch_edge that tries to patch the jump
1166    instruction INSN so that it reaches block NEW.  Do this
1167    only when it originally reached block OLD.  Return true if this
1168    worked or the original target wasn't OLD, return false if redirection
1169    doesn't work.  */
1170 
1171 static bool
1172 patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
1173 {
1174   rtx_jump_table_data *table;
1175   rtx tmp;
1176   /* Recognize a tablejump and adjust all matching cases.  */
1177   if (tablejump_p (insn, NULL, &table))
1178     {
1179       rtvec vec;
1180       int j;
1181       rtx_code_label *new_label = block_label (new_bb);
1182 
1183       if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1184 	return false;
1185       vec = table->get_labels ();
1186 
1187       for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1188 	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1189 	  {
1190 	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1191 	    --LABEL_NUSES (old_label);
1192 	    ++LABEL_NUSES (new_label);
1193 	  }
1194 
1195       /* Handle casesi dispatch insns.  */
1196       if ((tmp = single_set (insn)) != NULL
1197 	  && SET_DEST (tmp) == pc_rtx
1198 	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1199 	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1200 	  && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
1201 	{
1202 	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1203 						       new_label);
1204 	  --LABEL_NUSES (old_label);
1205 	  ++LABEL_NUSES (new_label);
1206 	}
1207     }
1208   else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1209     {
1210       int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1211       rtx note;
1212 
1213       if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1214 	return false;
1215       rtx_code_label *new_label = block_label (new_bb);
1216 
1217       for (i = 0; i < n; ++i)
1218 	{
1219 	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1220 	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1221 	  if (XEXP (old_ref, 0) == old_label)
1222 	    {
1223 	      ASM_OPERANDS_LABEL (tmp, i)
1224 		= gen_rtx_LABEL_REF (Pmode, new_label);
1225 	      --LABEL_NUSES (old_label);
1226 	      ++LABEL_NUSES (new_label);
1227 	    }
1228 	}
1229 
1230       if (JUMP_LABEL (insn) == old_label)
1231 	{
1232 	  JUMP_LABEL (insn) = new_label;
1233 	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1234 	  if (note)
1235 	    remove_note (insn, note);
1236 	}
1237       else
1238 	{
1239 	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1240 	  if (note)
1241 	    remove_note (insn, note);
1242 	  if (JUMP_LABEL (insn) != new_label
1243 	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1244 	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
1245 	}
1246       while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1247 	     != NULL_RTX)
1248 	XEXP (note, 0) = new_label;
1249     }
1250   else
1251     {
1252       /* ?? We may play the games with moving the named labels from
1253 	 one basic block to the other in case only one computed_jump is
1254 	 available.  */
1255       if (computed_jump_p (insn)
1256 	  /* A return instruction can't be redirected.  */
1257 	  || returnjump_p (insn))
1258 	return false;
1259 
1260       if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1261 	{
1262 	  /* If the insn doesn't go where we think, we're confused.  */
1263 	  gcc_assert (JUMP_LABEL (insn) == old_label);
1264 
1265 	  /* If the substitution doesn't succeed, die.  This can happen
1266 	     if the back end emitted unrecognizable instructions or if
1267 	     target is exit block on some arches.  */
1268 	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1269 			      block_label (new_bb), 0))
1270 	    {
1271 	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
1272 	      return false;
1273 	    }
1274 	}
1275     }
1276   return true;
1277 }
1278 
1279 
1280 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1281    NULL on failure  */
1282 static edge
1283 redirect_branch_edge (edge e, basic_block target)
1284 {
1285   rtx_insn *old_label = BB_HEAD (e->dest);
1286   basic_block src = e->src;
1287   rtx_insn *insn = BB_END (src);
1288 
1289   /* We can only redirect non-fallthru edges of jump insn.  */
1290   if (e->flags & EDGE_FALLTHRU)
1291     return NULL;
1292   else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1293     return NULL;
1294 
1295   if (!currently_expanding_to_rtl)
1296     {
1297       if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
1298 	return NULL;
1299     }
1300   else
1301     /* When expanding this BB might actually contain multiple
1302        jumps (i.e. not yet split by find_many_sub_basic_blocks).
1303        Redirect all of those that match our label.  */
1304     FOR_BB_INSNS (src, insn)
1305       if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
1306 					     old_label, target))
1307 	return NULL;
1308 
1309   if (dump_file)
1310     fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1311 	     e->src->index, e->dest->index, target->index);
1312 
1313   if (e->dest != target)
1314     e = redirect_edge_succ_nodup (e, target);
1315 
1316   return e;
1317 }
1318 
1319 /* Called when edge E has been redirected to a new destination,
1320    in order to update the region crossing flag on the edge and
1321    jump.  */
1322 
1323 static void
1324 fixup_partition_crossing (edge e)
1325 {
1326   if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
1327       == EXIT_BLOCK_PTR_FOR_FN (cfun))
1328     return;
1329   /* If we redirected an existing edge, it may already be marked
1330      crossing, even though the new src is missing a reg crossing note.
1331      But make sure reg crossing note doesn't already exist before
1332      inserting.  */
1333   if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1334     {
1335       e->flags |= EDGE_CROSSING;
1336       if (JUMP_P (BB_END (e->src)))
1337 	CROSSING_JUMP_P (BB_END (e->src)) = 1;
1338     }
1339   else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1340     {
1341       e->flags &= ~EDGE_CROSSING;
1342       /* Remove the section crossing note from jump at end of
1343          src if it exists, and if no other successors are
1344          still crossing.  */
1345       if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
1346         {
1347           bool has_crossing_succ = false;
1348           edge e2;
1349           edge_iterator ei;
1350           FOR_EACH_EDGE (e2, ei, e->src->succs)
1351             {
1352               has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1353               if (has_crossing_succ)
1354                 break;
1355             }
1356           if (!has_crossing_succ)
1357 	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
1358         }
1359     }
1360 }
1361 
1362 /* Called when block BB has been reassigned to the cold partition,
1363    because it is now dominated by another cold block,
1364    to ensure that the region crossing attributes are updated.  */
1365 
1366 static void
1367 fixup_new_cold_bb (basic_block bb)
1368 {
1369   edge e;
1370   edge_iterator ei;
1371 
1372   /* This is called when a hot bb is found to now be dominated
1373      by a cold bb and therefore needs to become cold. Therefore,
1374      its preds will no longer be region crossing. Any non-dominating
1375      preds that were previously hot would also have become cold
1376      in the caller for the same region. Any preds that were previously
1377      region-crossing will be adjusted in fixup_partition_crossing.  */
1378   FOR_EACH_EDGE (e, ei, bb->preds)
1379     {
1380       fixup_partition_crossing (e);
1381     }
1382 
1383   /* Possibly need to make bb's successor edges region crossing,
1384      or remove stale region crossing.  */
1385   FOR_EACH_EDGE (e, ei, bb->succs)
1386     {
1387       /* We can't have fall-through edges across partition boundaries.
1388          Note that force_nonfallthru will do any necessary partition
1389          boundary fixup by calling fixup_partition_crossing itself.  */
1390       if ((e->flags & EDGE_FALLTHRU)
1391           && BB_PARTITION (bb) != BB_PARTITION (e->dest)
1392 	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1393         force_nonfallthru (e);
1394       else
1395         fixup_partition_crossing (e);
1396     }
1397 }
1398 
1399 /* Attempt to change code to redirect edge E to TARGET.  Don't do that on
1400    expense of adding new instructions or reordering basic blocks.
1401 
1402    Function can be also called with edge destination equivalent to the TARGET.
1403    Then it should try the simplifications and do nothing if none is possible.
1404 
1405    Return edge representing the branch if transformation succeeded.  Return NULL
1406    on failure.
1407    We still return NULL in case E already destinated TARGET and we didn't
1408    managed to simplify instruction stream.  */
1409 
1410 static edge
1411 rtl_redirect_edge_and_branch (edge e, basic_block target)
1412 {
1413   edge ret;
1414   basic_block src = e->src;
1415   basic_block dest = e->dest;
1416 
1417   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1418     return NULL;
1419 
1420   if (dest == target)
1421     return e;
1422 
1423   if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1424     {
1425       df_set_bb_dirty (src);
1426       fixup_partition_crossing (ret);
1427       return ret;
1428     }
1429 
1430   ret = redirect_branch_edge (e, target);
1431   if (!ret)
1432     return NULL;
1433 
1434   df_set_bb_dirty (src);
1435   fixup_partition_crossing (ret);
1436   return ret;
1437 }
1438 
1439 /* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */
1440 
1441 void
1442 emit_barrier_after_bb (basic_block bb)
1443 {
1444   rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
1445   gcc_assert (current_ir_type () == IR_RTL_CFGRTL
1446               || current_ir_type () == IR_RTL_CFGLAYOUT);
1447   if (current_ir_type () == IR_RTL_CFGLAYOUT)
1448     {
1449       rtx_insn *insn = unlink_insn_chain (barrier, barrier);
1450 
1451       if (BB_FOOTER (bb))
1452 	{
1453           rtx_insn *footer_tail = BB_FOOTER (bb);
1454 
1455           while (NEXT_INSN (footer_tail))
1456             footer_tail = NEXT_INSN (footer_tail);
1457           if (!BARRIER_P (footer_tail))
1458             {
1459               SET_NEXT_INSN (footer_tail) = insn;
1460               SET_PREV_INSN (insn) = footer_tail;
1461             }
1462 	}
1463       else
1464         BB_FOOTER (bb) = insn;
1465     }
1466 }
1467 
1468 /* Like force_nonfallthru below, but additionally performs redirection
1469    Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
1470    when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1471    simple_return_rtx, indicating which kind of returnjump to create.
1472    It should be NULL otherwise.  */
1473 
1474 basic_block
1475 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1476 {
1477   basic_block jump_block, new_bb = NULL, src = e->src;
1478   rtx note;
1479   edge new_edge;
1480   int abnormal_edge_flags = 0;
1481   bool asm_goto_edge = false;
1482   int loc;
1483 
1484   /* In the case the last instruction is conditional jump to the next
1485      instruction, first redirect the jump itself and then continue
1486      by creating a basic block afterwards to redirect fallthru edge.  */
1487   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1488       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1489       && any_condjump_p (BB_END (e->src))
1490       && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1491     {
1492       rtx note;
1493       edge b = unchecked_make_edge (e->src, target, 0);
1494       bool redirected;
1495 
1496       redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
1497 				  block_label (target), 0);
1498       gcc_assert (redirected);
1499 
1500       note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1501       if (note)
1502 	{
1503 	  int prob = XINT (note, 0);
1504 
1505 	  b->probability = profile_probability::from_reg_br_prob_note (prob);
1506 	  e->probability -= e->probability;
1507 	}
1508     }
1509 
1510   if (e->flags & EDGE_ABNORMAL)
1511     {
1512       /* Irritating special case - fallthru edge to the same block as abnormal
1513 	 edge.
1514 	 We can't redirect abnormal edge, but we still can split the fallthru
1515 	 one and create separate abnormal edge to original destination.
1516 	 This allows bb-reorder to make such edge non-fallthru.  */
1517       gcc_assert (e->dest == target);
1518       abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1519       e->flags &= EDGE_FALLTHRU;
1520     }
1521   else
1522     {
1523       gcc_assert (e->flags & EDGE_FALLTHRU);
1524       if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1525 	{
1526 	  /* We can't redirect the entry block.  Create an empty block
1527 	     at the start of the function which we use to add the new
1528 	     jump.  */
1529 	  edge tmp;
1530 	  edge_iterator ei;
1531 	  bool found = false;
1532 
1533 	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
1534 					       ENTRY_BLOCK_PTR_FOR_FN (cfun));
1535 	  bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
1536 
1537 	  /* Make sure new block ends up in correct hot/cold section.  */
1538 	  BB_COPY_PARTITION (bb, e->dest);
1539 
1540 	  /* Change the existing edge's source to be the new block, and add
1541 	     a new edge from the entry block to the new block.  */
1542 	  e->src = bb;
1543 	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1544 	       (tmp = ei_safe_edge (ei)); )
1545 	    {
1546 	      if (tmp == e)
1547 		{
1548 		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
1549 		  found = true;
1550 		  break;
1551 		}
1552 	      else
1553 		ei_next (&ei);
1554 	    }
1555 
1556 	  gcc_assert (found);
1557 
1558 	  vec_safe_push (bb->succs, e);
1559 	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
1560 				 EDGE_FALLTHRU);
1561 	}
1562     }
1563 
1564   /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1565      don't point to the target or fallthru label.  */
1566   if (JUMP_P (BB_END (e->src))
1567       && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
1568       && (e->flags & EDGE_FALLTHRU)
1569       && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1570     {
1571       int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1572       bool adjust_jump_target = false;
1573 
1574       for (i = 0; i < n; ++i)
1575 	{
1576 	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1577 	    {
1578 	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1579 	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1580 	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1581 	      adjust_jump_target = true;
1582 	    }
1583 	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1584 	    asm_goto_edge = true;
1585 	}
1586       if (adjust_jump_target)
1587 	{
1588 	  rtx_insn *insn = BB_END (e->src);
1589 	  rtx note;
1590 	  rtx_insn *old_label = BB_HEAD (e->dest);
1591 	  rtx_insn *new_label = BB_HEAD (target);
1592 
1593 	  if (JUMP_LABEL (insn) == old_label)
1594 	    {
1595 	      JUMP_LABEL (insn) = new_label;
1596 	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1597 	      if (note)
1598 		remove_note (insn, note);
1599 	    }
1600 	  else
1601 	    {
1602 	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1603 	      if (note)
1604 		remove_note (insn, note);
1605 	      if (JUMP_LABEL (insn) != new_label
1606 		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1607 		add_reg_note (insn, REG_LABEL_TARGET, new_label);
1608 	    }
1609 	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1610 		 != NULL_RTX)
1611 	    XEXP (note, 0) = new_label;
1612 	}
1613     }
1614 
1615   if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1616     {
1617       rtx_insn *new_head;
1618       profile_count count = e->count ();
1619       profile_probability probability = e->probability;
1620       /* Create the new structures.  */
1621 
1622       /* If the old block ended with a tablejump, skip its table
1623 	 by searching forward from there.  Otherwise start searching
1624 	 forward from the last instruction of the old block.  */
1625       rtx_jump_table_data *table;
1626       if (tablejump_p (BB_END (e->src), NULL, &table))
1627 	new_head = table;
1628       else
1629 	new_head = BB_END (e->src);
1630       new_head = NEXT_INSN (new_head);
1631 
1632       jump_block = create_basic_block (new_head, NULL, e->src);
1633       jump_block->count = count;
1634 
1635       /* Make sure new block ends up in correct hot/cold section.  */
1636 
1637       BB_COPY_PARTITION (jump_block, e->src);
1638 
1639       /* Wire edge in.  */
1640       new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1641       new_edge->probability = probability;
1642 
1643       /* Redirect old edge.  */
1644       redirect_edge_pred (e, jump_block);
1645       e->probability = profile_probability::always ();
1646 
1647       /* If e->src was previously region crossing, it no longer is
1648          and the reg crossing note should be removed.  */
1649       fixup_partition_crossing (new_edge);
1650 
1651       /* If asm goto has any label refs to target's label,
1652 	 add also edge from asm goto bb to target.  */
1653       if (asm_goto_edge)
1654 	{
1655 	  new_edge->probability = new_edge->probability.apply_scale (1, 2);
1656 	  jump_block->count = jump_block->count.apply_scale (1, 2);
1657 	  edge new_edge2 = make_edge (new_edge->src, target,
1658 				      e->flags & ~EDGE_FALLTHRU);
1659 	  new_edge2->probability = probability - new_edge->probability;
1660 	}
1661 
1662       new_bb = jump_block;
1663     }
1664   else
1665     jump_block = e->src;
1666 
1667   loc = e->goto_locus;
1668   e->flags &= ~EDGE_FALLTHRU;
1669   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1670     {
1671       if (jump_label == ret_rtx)
1672 	emit_jump_insn_after_setloc (targetm.gen_return (),
1673 				     BB_END (jump_block), loc);
1674       else
1675 	{
1676 	  gcc_assert (jump_label == simple_return_rtx);
1677 	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
1678 				       BB_END (jump_block), loc);
1679 	}
1680       set_return_jump_label (BB_END (jump_block));
1681     }
1682   else
1683     {
1684       rtx_code_label *label = block_label (target);
1685       emit_jump_insn_after_setloc (targetm.gen_jump (label),
1686 				   BB_END (jump_block), loc);
1687       JUMP_LABEL (BB_END (jump_block)) = label;
1688       LABEL_NUSES (label)++;
1689     }
1690 
1691   /* We might be in cfg layout mode, and if so, the following routine will
1692      insert the barrier correctly.  */
1693   emit_barrier_after_bb (jump_block);
1694   redirect_edge_succ_nodup (e, target);
1695 
1696   if (abnormal_edge_flags)
1697     make_edge (src, target, abnormal_edge_flags);
1698 
1699   df_mark_solutions_dirty ();
1700   fixup_partition_crossing (e);
1701   return new_bb;
1702 }
1703 
1704 /* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
1705    (and possibly create new basic block) to make edge non-fallthru.
1706    Return newly created BB or NULL if none.  */
1707 
1708 static basic_block
1709 rtl_force_nonfallthru (edge e)
1710 {
1711   return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1712 }
1713 
1714 /* Redirect edge even at the expense of creating new jump insn or
1715    basic block.  Return new basic block if created, NULL otherwise.
1716    Conversion must be possible.  */
1717 
1718 static basic_block
1719 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1720 {
1721   if (redirect_edge_and_branch (e, target)
1722       || e->dest == target)
1723     return NULL;
1724 
1725   /* In case the edge redirection failed, try to force it to be non-fallthru
1726      and redirect newly created simplejump.  */
1727   df_set_bb_dirty (e->src);
1728   return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1729 }
1730 
1731 /* The given edge should potentially be a fallthru edge.  If that is in
1732    fact true, delete the jump and barriers that are in the way.  */
1733 
1734 static void
1735 rtl_tidy_fallthru_edge (edge e)
1736 {
1737   rtx_insn *q;
1738   basic_block b = e->src, c = b->next_bb;
1739 
1740   /* ??? In a late-running flow pass, other folks may have deleted basic
1741      blocks by nopping out blocks, leaving multiple BARRIERs between here
1742      and the target label. They ought to be chastised and fixed.
1743 
1744      We can also wind up with a sequence of undeletable labels between
1745      one block and the next.
1746 
1747      So search through a sequence of barriers, labels, and notes for
1748      the head of block C and assert that we really do fall through.  */
1749 
1750   for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1751     if (NONDEBUG_INSN_P (q))
1752       return;
1753 
1754   /* Remove what will soon cease being the jump insn from the source block.
1755      If block B consisted only of this single jump, turn it into a deleted
1756      note.  */
1757   q = BB_END (b);
1758   if (JUMP_P (q)
1759       && onlyjump_p (q)
1760       && (any_uncondjump_p (q)
1761 	  || single_succ_p (b)))
1762     {
1763       rtx_insn *label;
1764       rtx_jump_table_data *table;
1765 
1766       if (tablejump_p (q, &label, &table))
1767 	{
1768 	  /* The label is likely mentioned in some instruction before
1769 	     the tablejump and might not be DCEd, so turn it into
1770 	     a note instead and move before the tablejump that is going to
1771 	     be deleted.  */
1772 	  const char *name = LABEL_NAME (label);
1773 	  PUT_CODE (label, NOTE);
1774 	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
1775 	  NOTE_DELETED_LABEL_NAME (label) = name;
1776 	  reorder_insns (label, label, PREV_INSN (q));
1777 	  delete_insn (table);
1778 	}
1779 
1780       /* If this was a conditional jump, we need to also delete
1781 	 the insn that set cc0.  */
1782       if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1783 	q = PREV_INSN (q);
1784 
1785       q = PREV_INSN (q);
1786     }
1787   /* Unconditional jumps with side-effects (i.e. which we can't just delete
1788      together with the barrier) should never have a fallthru edge.  */
1789   else if (JUMP_P (q) && any_uncondjump_p (q))
1790     return;
1791 
1792   /* Selectively unlink the sequence.  */
1793   if (q != PREV_INSN (BB_HEAD (c)))
1794     delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1795 
1796   e->flags |= EDGE_FALLTHRU;
1797 }
1798 
1799 /* Should move basic block BB after basic block AFTER.  NIY.  */
1800 
1801 static bool
1802 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1803 		      basic_block after ATTRIBUTE_UNUSED)
1804 {
1805   return false;
1806 }
1807 
1808 /* Locate the last bb in the same partition as START_BB.  */
1809 
1810 static basic_block
1811 last_bb_in_partition (basic_block start_bb)
1812 {
1813   basic_block bb;
1814   FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1815     {
1816       if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1817         return bb;
1818     }
1819   /* Return bb before the exit block.  */
1820   return bb->prev_bb;
1821 }
1822 
1823 /* Split a (typically critical) edge.  Return the new block.
1824    The edge must not be abnormal.
1825 
1826    ??? The code generally expects to be called on critical edges.
1827    The case of a block ending in an unconditional jump to a
1828    block with multiple predecessors is not handled optimally.  */
1829 
1830 static basic_block
1831 rtl_split_edge (edge edge_in)
1832 {
1833   basic_block bb, new_bb;
1834   rtx_insn *before;
1835 
1836   /* Abnormal edges cannot be split.  */
1837   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1838 
1839   /* We are going to place the new block in front of edge destination.
1840      Avoid existence of fallthru predecessors.  */
1841   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1842     {
1843       edge e = find_fallthru_edge (edge_in->dest->preds);
1844 
1845       if (e)
1846 	force_nonfallthru (e);
1847     }
1848 
1849   /* Create the basic block note.  */
1850   if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1851     before = BB_HEAD (edge_in->dest);
1852   else
1853     before = NULL;
1854 
1855   /* If this is a fall through edge to the exit block, the blocks might be
1856      not adjacent, and the right place is after the source.  */
1857   if ((edge_in->flags & EDGE_FALLTHRU)
1858       && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1859     {
1860       before = NEXT_INSN (BB_END (edge_in->src));
1861       bb = create_basic_block (before, NULL, edge_in->src);
1862       BB_COPY_PARTITION (bb, edge_in->src);
1863     }
1864   else
1865     {
1866       if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1867         {
1868           bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1869           BB_COPY_PARTITION (bb, edge_in->dest);
1870         }
1871       else
1872         {
1873           basic_block after = edge_in->dest->prev_bb;
1874           /* If this is post-bb reordering, and the edge crosses a partition
1875              boundary, the new block needs to be inserted in the bb chain
1876              at the end of the src partition (since we put the new bb into
1877              that partition, see below). Otherwise we may end up creating
1878              an extra partition crossing in the chain, which is illegal.
1879              It can't go after the src, because src may have a fall-through
1880              to a different block.  */
1881           if (crtl->bb_reorder_complete
1882               && (edge_in->flags & EDGE_CROSSING))
1883             {
1884               after = last_bb_in_partition (edge_in->src);
1885               before = get_last_bb_insn (after);
1886               /* The instruction following the last bb in partition should
1887                  be a barrier, since it cannot end in a fall-through.  */
1888               gcc_checking_assert (BARRIER_P (before));
1889               before = NEXT_INSN (before);
1890             }
1891           bb = create_basic_block (before, NULL, after);
1892           /* Put the split bb into the src partition, to avoid creating
1893              a situation where a cold bb dominates a hot bb, in the case
1894              where src is cold and dest is hot. The src will dominate
1895              the new bb (whereas it might not have dominated dest).  */
1896           BB_COPY_PARTITION (bb, edge_in->src);
1897         }
1898     }
1899 
1900   make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1901 
1902   /* Can't allow a region crossing edge to be fallthrough.  */
1903   if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1904       && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1905     {
1906       new_bb = force_nonfallthru (single_succ_edge (bb));
1907       gcc_assert (!new_bb);
1908     }
1909 
1910   /* For non-fallthru edges, we must adjust the predecessor's
1911      jump instruction to target our new block.  */
1912   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1913     {
1914       edge redirected = redirect_edge_and_branch (edge_in, bb);
1915       gcc_assert (redirected);
1916     }
1917   else
1918     {
1919       if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1920 	{
1921 	  /* For asm goto even splitting of fallthru edge might
1922 	     need insn patching, as other labels might point to the
1923 	     old label.  */
1924 	  rtx_insn *last = BB_END (edge_in->src);
1925 	  if (last
1926 	      && JUMP_P (last)
1927 	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1928 	      && (extract_asm_operands (PATTERN (last))
1929 		  || JUMP_LABEL (last) == before)
1930 	      && patch_jump_insn (last, before, bb))
1931 	    df_set_bb_dirty (edge_in->src);
1932 	}
1933       redirect_edge_succ (edge_in, bb);
1934     }
1935 
1936   return bb;
1937 }
1938 
1939 /* Queue instructions for insertion on an edge between two basic blocks.
1940    The new instructions and basic blocks (if any) will not appear in the
1941    CFG until commit_edge_insertions is called.  */
1942 
1943 void
1944 insert_insn_on_edge (rtx pattern, edge e)
1945 {
1946   /* We cannot insert instructions on an abnormal critical edge.
1947      It will be easier to find the culprit if we die now.  */
1948   gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1949 
1950   if (e->insns.r == NULL_RTX)
1951     start_sequence ();
1952   else
1953     push_to_sequence (e->insns.r);
1954 
1955   emit_insn (pattern);
1956 
1957   e->insns.r = get_insns ();
1958   end_sequence ();
1959 }
1960 
1961 /* Update the CFG for the instructions queued on edge E.  */
1962 
1963 void
1964 commit_one_edge_insertion (edge e)
1965 {
1966   rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
1967   basic_block bb;
1968 
1969   /* Pull the insns off the edge now since the edge might go away.  */
1970   insns = e->insns.r;
1971   e->insns.r = NULL;
1972 
1973   /* Figure out where to put these insns.  If the destination has
1974      one predecessor, insert there.  Except for the exit block.  */
1975   if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1976     {
1977       bb = e->dest;
1978 
1979       /* Get the location correct wrt a code label, and "nice" wrt
1980 	 a basic block note, and before everything else.  */
1981       tmp = BB_HEAD (bb);
1982       if (LABEL_P (tmp))
1983 	tmp = NEXT_INSN (tmp);
1984       if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1985 	tmp = NEXT_INSN (tmp);
1986       if (tmp == BB_HEAD (bb))
1987 	before = tmp;
1988       else if (tmp)
1989 	after = PREV_INSN (tmp);
1990       else
1991 	after = get_last_insn ();
1992     }
1993 
1994   /* If the source has one successor and the edge is not abnormal,
1995      insert there.  Except for the entry block.
1996      Don't do this if the predecessor ends in a jump other than
1997      unconditional simple jump.  E.g. for asm goto that points all
1998      its labels at the fallthru basic block, we can't insert instructions
1999      before the asm goto, as the asm goto can have various of side effects,
2000      and can't emit instructions after the asm goto, as it must end
2001      the basic block.  */
2002   else if ((e->flags & EDGE_ABNORMAL) == 0
2003 	   && single_succ_p (e->src)
2004 	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2005 	   && (!JUMP_P (BB_END (e->src))
2006 	       || simplejump_p (BB_END (e->src))))
2007     {
2008       bb = e->src;
2009 
2010       /* It is possible to have a non-simple jump here.  Consider a target
2011 	 where some forms of unconditional jumps clobber a register.  This
2012 	 happens on the fr30 for example.
2013 
2014 	 We know this block has a single successor, so we can just emit
2015 	 the queued insns before the jump.  */
2016       if (JUMP_P (BB_END (bb)))
2017 	before = BB_END (bb);
2018       else
2019 	{
2020 	  /* We'd better be fallthru, or we've lost track of what's what.  */
2021 	  gcc_assert (e->flags & EDGE_FALLTHRU);
2022 
2023 	  after = BB_END (bb);
2024 	}
2025     }
2026 
2027   /* Otherwise we must split the edge.  */
2028   else
2029     {
2030       bb = split_edge (e);
2031 
2032       /* If E crossed a partition boundary, we needed to make bb end in
2033          a region-crossing jump, even though it was originally fallthru.  */
2034       if (JUMP_P (BB_END (bb)))
2035 	before = BB_END (bb);
2036       else
2037         after = BB_END (bb);
2038     }
2039 
2040   /* Now that we've found the spot, do the insertion.  */
2041   if (before)
2042     {
2043       emit_insn_before_noloc (insns, before, bb);
2044       last = prev_nonnote_insn (before);
2045     }
2046   else
2047     last = emit_insn_after_noloc (insns, after, bb);
2048 
2049   if (returnjump_p (last))
2050     {
2051       /* ??? Remove all outgoing edges from BB and add one for EXIT.
2052 	 This is not currently a problem because this only happens
2053 	 for the (single) epilogue, which already has a fallthru edge
2054 	 to EXIT.  */
2055 
2056       e = single_succ_edge (bb);
2057       gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2058 		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2059 
2060       e->flags &= ~EDGE_FALLTHRU;
2061       emit_barrier_after (last);
2062 
2063       if (before)
2064 	delete_insn (before);
2065     }
2066   else
2067     gcc_assert (!JUMP_P (last));
2068 }
2069 
2070 /* Update the CFG for all queued instructions.  */
2071 
2072 void
2073 commit_edge_insertions (void)
2074 {
2075   basic_block bb;
2076 
2077   /* Optimization passes that invoke this routine can cause hot blocks
2078      previously reached by both hot and cold blocks to become dominated only
2079      by cold blocks. This will cause the verification below to fail,
2080      and lead to now cold code in the hot section. In some cases this
2081      may only be visible after newly unreachable blocks are deleted,
2082      which will be done by fixup_partitions.  */
2083   fixup_partitions ();
2084 
2085   checking_verify_flow_info ();
2086 
2087   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2088 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2089     {
2090       edge e;
2091       edge_iterator ei;
2092 
2093       FOR_EACH_EDGE (e, ei, bb->succs)
2094 	if (e->insns.r)
2095 	  commit_one_edge_insertion (e);
2096     }
2097 }
2098 
2099 
2100 /* Print out RTL-specific basic block information (live information
2101    at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
2102    documented in dumpfile.h.  */
2103 
2104 static void
2105 rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
2106 {
2107   char *s_indent;
2108 
2109   s_indent = (char *) alloca ((size_t) indent + 1);
2110   memset (s_indent, ' ', (size_t) indent);
2111   s_indent[indent] = '\0';
2112 
2113   if (df && (flags & TDF_DETAILS))
2114     {
2115       df_dump_top (bb, outf);
2116       putc ('\n', outf);
2117     }
2118 
2119   if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2120     {
2121       rtx_insn *last = BB_END (bb);
2122       if (last)
2123 	last = NEXT_INSN (last);
2124       for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
2125 	{
2126 	  if (flags & TDF_DETAILS)
2127 	    df_dump_insn_top (insn, outf);
2128 	  if (! (flags & TDF_SLIM))
2129 	    print_rtl_single (outf, insn);
2130 	  else
2131 	    dump_insn_slim (outf, insn);
2132 	  if (flags & TDF_DETAILS)
2133 	    df_dump_insn_bottom (insn, outf);
2134 	}
2135     }
2136 
2137   if (df && (flags & TDF_DETAILS))
2138     {
2139       df_dump_bottom (bb, outf);
2140       putc ('\n', outf);
2141     }
2142 
2143 }
2144 
2145 /* Like dump_function_to_file, but for RTL.  Print out dataflow information
2146    for the start of each basic block.  FLAGS are the TDF_* masks documented
2147    in dumpfile.h.  */
2148 
2149 void
2150 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
2151 {
2152   const rtx_insn *tmp_rtx;
2153   if (rtx_first == 0)
2154     fprintf (outf, "(nil)\n");
2155   else
2156     {
2157       enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2158       int max_uid = get_max_uid ();
2159       basic_block *start = XCNEWVEC (basic_block, max_uid);
2160       basic_block *end = XCNEWVEC (basic_block, max_uid);
2161       enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2162       basic_block bb;
2163 
2164       /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2165 	 insns, but the CFG is not maintained so the basic block info
2166 	 is not reliable.  Therefore it's omitted from the dumps.  */
2167       if (! (cfun->curr_properties & PROP_cfg))
2168         flags &= ~TDF_BLOCKS;
2169 
2170       if (df)
2171 	df_dump_start (outf);
2172 
2173       if (flags & TDF_BLOCKS)
2174 	{
2175 	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
2176 	    {
2177 	      rtx_insn *x;
2178 
2179 	      start[INSN_UID (BB_HEAD (bb))] = bb;
2180 	      end[INSN_UID (BB_END (bb))] = bb;
2181 	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2182 		{
2183 		  enum bb_state state = IN_MULTIPLE_BB;
2184 
2185 		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2186 		    state = IN_ONE_BB;
2187 		  in_bb_p[INSN_UID (x)] = state;
2188 
2189 		  if (x == BB_END (bb))
2190 		    break;
2191 		}
2192 	    }
2193 	}
2194 
2195       for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
2196 	{
2197 	  if (flags & TDF_BLOCKS)
2198 	    {
2199 	      bb = start[INSN_UID (tmp_rtx)];
2200 	      if (bb != NULL)
2201 		{
2202 		  dump_bb_info (outf, bb, 0, dump_flags, true, false);
2203 		  if (df && (flags & TDF_DETAILS))
2204 		    df_dump_top (bb, outf);
2205 		}
2206 
2207 	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2208 		  && !NOTE_P (tmp_rtx)
2209 		  && !BARRIER_P (tmp_rtx))
2210 		fprintf (outf, ";; Insn is not within a basic block\n");
2211 	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2212 		fprintf (outf, ";; Insn is in multiple basic blocks\n");
2213 	    }
2214 
2215 	  if (flags & TDF_DETAILS)
2216 	    df_dump_insn_top (tmp_rtx, outf);
2217 	  if (! (flags & TDF_SLIM))
2218 	    print_rtl_single (outf, tmp_rtx);
2219 	  else
2220 	    dump_insn_slim (outf, tmp_rtx);
2221 	  if (flags & TDF_DETAILS)
2222 	    df_dump_insn_bottom (tmp_rtx, outf);
2223 
2224 	  if (flags & TDF_BLOCKS)
2225 	    {
2226 	      bb = end[INSN_UID (tmp_rtx)];
2227 	      if (bb != NULL)
2228 		{
2229 		  dump_bb_info (outf, bb, 0, dump_flags, false, true);
2230 		  if (df && (flags & TDF_DETAILS))
2231 		    df_dump_bottom (bb, outf);
2232 		  putc ('\n', outf);
2233 		}
2234 	    }
2235 	}
2236 
2237       free (start);
2238       free (end);
2239       free (in_bb_p);
2240     }
2241 }
2242 
2243 /* Update the branch probability of BB if a REG_BR_PROB is present.  */
2244 
2245 void
2246 update_br_prob_note (basic_block bb)
2247 {
2248   rtx note;
2249   note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2250   if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2251     {
2252       if (note)
2253 	{
2254 	  rtx *note_link, this_rtx;
2255 
2256 	  note_link = &REG_NOTES (BB_END (bb));
2257 	  for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2258 	    if (this_rtx == note)
2259 	      {
2260 		*note_link = XEXP (this_rtx, 1);
2261 		break;
2262 	      }
2263 	}
2264       return;
2265     }
2266   if (!note
2267       || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2268     return;
2269   XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
2270 }
2271 
2272 /* Get the last insn associated with block BB (that includes barriers and
2273    tablejumps after BB).  */
2274 rtx_insn *
2275 get_last_bb_insn (basic_block bb)
2276 {
2277   rtx_jump_table_data *table;
2278   rtx_insn *tmp;
2279   rtx_insn *end = BB_END (bb);
2280 
2281   /* Include any jump table following the basic block.  */
2282   if (tablejump_p (end, NULL, &table))
2283     end = table;
2284 
2285   /* Include any barriers that may follow the basic block.  */
2286   tmp = next_nonnote_nondebug_insn_bb (end);
2287   while (tmp && BARRIER_P (tmp))
2288     {
2289       end = tmp;
2290       tmp = next_nonnote_nondebug_insn_bb (end);
2291     }
2292 
2293   return end;
2294 }
2295 
2296 /* Add all BBs reachable from entry via hot paths into the SET.  */
2297 
2298 void
2299 find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2300 {
2301   auto_vec<basic_block, 64> worklist;
2302 
2303   set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2304   worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2305 
2306   while (worklist.length () > 0)
2307     {
2308       basic_block bb = worklist.pop ();
2309       edge_iterator ei;
2310       edge e;
2311 
2312       FOR_EACH_EDGE (e, ei, bb->succs)
2313 	if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2314 	    && !set->add (e->dest))
2315 	  worklist.safe_push (e->dest);
2316     }
2317 }
2318 
2319 /* Sanity check partition hotness to ensure that basic blocks in
2320    the cold partition don't dominate basic blocks in the hot partition.
2321    If FLAG_ONLY is true, report violations as errors. Otherwise
2322    re-mark the dominated blocks as cold, since this is run after
2323    cfg optimizations that may make hot blocks previously reached
2324    by both hot and cold blocks now only reachable along cold paths.  */
2325 
2326 static vec<basic_block>
2327 find_partition_fixes (bool flag_only)
2328 {
2329   basic_block bb;
2330   vec<basic_block> bbs_in_cold_partition = vNULL;
2331   vec<basic_block> bbs_to_fix = vNULL;
2332   hash_set<basic_block> set;
2333 
2334   /* Callers check this.  */
2335   gcc_checking_assert (crtl->has_bb_partition);
2336 
2337   find_bbs_reachable_by_hot_paths (&set);
2338 
2339   FOR_EACH_BB_FN (bb, cfun)
2340     if (!set.contains (bb)
2341 	&& BB_PARTITION (bb) != BB_COLD_PARTITION)
2342       {
2343 	if (flag_only)
2344 	  error ("non-cold basic block %d reachable only "
2345 		 "by paths crossing the cold partition", bb->index);
2346 	else
2347 	  BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2348 	bbs_to_fix.safe_push (bb);
2349 	bbs_in_cold_partition.safe_push (bb);
2350       }
2351 
2352   return bbs_to_fix;
2353 }
2354 
2355 /* Perform cleanup on the hot/cold bb partitioning after optimization
2356    passes that modify the cfg.  */
2357 
2358 void
2359 fixup_partitions (void)
2360 {
2361   basic_block bb;
2362 
2363   if (!crtl->has_bb_partition)
2364     return;
2365 
2366   /* Delete any blocks that became unreachable and weren't
2367      already cleaned up, for example during edge forwarding
2368      and convert_jumps_to_returns. This will expose more
2369      opportunities for fixing the partition boundaries here.
2370      Also, the calculation of the dominance graph during verification
2371      will assert if there are unreachable nodes.  */
2372   delete_unreachable_blocks ();
2373 
2374   /* If there are partitions, do a sanity check on them: A basic block in
2375      a cold partition cannot dominate a basic block in a hot partition.
2376      Fixup any that now violate this requirement, as a result of edge
2377      forwarding and unreachable block deletion.  */
2378   vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2379 
2380   /* Do the partition fixup after all necessary blocks have been converted to
2381      cold, so that we only update the region crossings the minimum number of
2382      places, which can require forcing edges to be non fallthru.  */
2383   while (! bbs_to_fix.is_empty ())
2384     {
2385       bb = bbs_to_fix.pop ();
2386       fixup_new_cold_bb (bb);
2387     }
2388 }
2389 
2390 /* Verify, in the basic block chain, that there is at most one switch
2391    between hot/cold partitions. This condition will not be true until
2392    after reorder_basic_blocks is called.  */
2393 
2394 static int
2395 verify_hot_cold_block_grouping (void)
2396 {
2397   basic_block bb;
2398   int err = 0;
2399   bool switched_sections = false;
2400   int current_partition = BB_UNPARTITIONED;
2401 
2402   /* Even after bb reordering is complete, we go into cfglayout mode
2403      again (in compgoto). Ensure we don't call this before going back
2404      into linearized RTL when any layout fixes would have been committed.  */
2405   if (!crtl->bb_reorder_complete
2406       || current_ir_type () != IR_RTL_CFGRTL)
2407     return err;
2408 
2409   FOR_EACH_BB_FN (bb, cfun)
2410     {
2411       if (current_partition != BB_UNPARTITIONED
2412           && BB_PARTITION (bb) != current_partition)
2413 	{
2414 	  if (switched_sections)
2415 	    {
2416 	      error ("multiple hot/cold transitions found (bb %i)",
2417 		     bb->index);
2418 	      err = 1;
2419 	    }
2420 	  else
2421             switched_sections = true;
2422 
2423           if (!crtl->has_bb_partition)
2424             error ("partition found but function partition flag not set");
2425 	}
2426       current_partition = BB_PARTITION (bb);
2427     }
2428 
2429   return err;
2430 }
2431 
2432 
2433 /* Perform several checks on the edges out of each block, such as
2434    the consistency of the branch probabilities, the correctness
2435    of hot/cold partition crossing edges, and the number of expected
2436    successor edges.  Also verify that the dominance relationship
2437    between hot/cold blocks is sane.  */
2438 
2439 static int
2440 rtl_verify_edges (void)
2441 {
2442   int err = 0;
2443   basic_block bb;
2444 
2445   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2446     {
2447       int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2448       int n_eh = 0, n_abnormal = 0;
2449       edge e, fallthru = NULL;
2450       edge_iterator ei;
2451       rtx note;
2452       bool has_crossing_edge = false;
2453 
2454       if (JUMP_P (BB_END (bb))
2455 	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2456 	  && EDGE_COUNT (bb->succs) >= 2
2457 	  && any_condjump_p (BB_END (bb)))
2458 	{
2459 	  if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2460 	    {
2461 	      if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
2462 		{
2463 		  error ("verify_flow_info: "
2464 			 "REG_BR_PROB is set but cfg probability is not");
2465 		  err = 1;
2466 		}
2467 	    }
2468 	  else if (XINT (note, 0)
2469 	           != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
2470 	           && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2471 	    {
2472 	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2473 		     XINT (note, 0),
2474 		     BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
2475 	      err = 1;
2476 	    }
2477 	}
2478 
2479       FOR_EACH_EDGE (e, ei, bb->succs)
2480 	{
2481 	  bool is_crossing;
2482 
2483 	  if (e->flags & EDGE_FALLTHRU)
2484 	    n_fallthru++, fallthru = e;
2485 
2486 	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2487 			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2488 			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2489           has_crossing_edge |= is_crossing;
2490 	  if (e->flags & EDGE_CROSSING)
2491 	    {
2492 	      if (!is_crossing)
2493 		{
2494 		  error ("EDGE_CROSSING incorrectly set across same section");
2495 		  err = 1;
2496 		}
2497 	      if (e->flags & EDGE_FALLTHRU)
2498 		{
2499 		  error ("fallthru edge crosses section boundary in bb %i",
2500 			 e->src->index);
2501 		  err = 1;
2502 		}
2503 	      if (e->flags & EDGE_EH)
2504 		{
2505 		  error ("EH edge crosses section boundary in bb %i",
2506 			 e->src->index);
2507 		  err = 1;
2508 		}
2509               if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2510 		{
2511 		  error ("No region crossing jump at section boundary in bb %i",
2512 			 bb->index);
2513 		  err = 1;
2514 		}
2515 	    }
2516 	  else if (is_crossing)
2517 	    {
2518 	      error ("EDGE_CROSSING missing across section boundary");
2519 	      err = 1;
2520 	    }
2521 
2522 	  if ((e->flags & ~(EDGE_DFS_BACK
2523 			    | EDGE_CAN_FALLTHRU
2524 			    | EDGE_IRREDUCIBLE_LOOP
2525 			    | EDGE_LOOP_EXIT
2526 			    | EDGE_CROSSING
2527 			    | EDGE_PRESERVE)) == 0)
2528 	    n_branch++;
2529 
2530 	  if (e->flags & EDGE_ABNORMAL_CALL)
2531 	    n_abnormal_call++;
2532 
2533 	  if (e->flags & EDGE_SIBCALL)
2534 	    n_sibcall++;
2535 
2536 	  if (e->flags & EDGE_EH)
2537 	    n_eh++;
2538 
2539 	  if (e->flags & EDGE_ABNORMAL)
2540 	    n_abnormal++;
2541 	}
2542 
2543         if (!has_crossing_edge
2544 	    && JUMP_P (BB_END (bb))
2545 	    && CROSSING_JUMP_P (BB_END (bb)))
2546           {
2547 	    print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
2548             error ("Region crossing jump across same section in bb %i",
2549                    bb->index);
2550             err = 1;
2551           }
2552 
2553       if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2554 	{
2555 	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2556 	  err = 1;
2557 	}
2558       if (n_eh > 1)
2559 	{
2560 	  error ("too many exception handling edges in bb %i", bb->index);
2561 	  err = 1;
2562 	}
2563       if (n_branch
2564 	  && (!JUMP_P (BB_END (bb))
2565 	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2566 				   || any_condjump_p (BB_END (bb))))))
2567 	{
2568 	  error ("too many outgoing branch edges from bb %i", bb->index);
2569 	  err = 1;
2570 	}
2571       if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2572 	{
2573 	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
2574 	  err = 1;
2575 	}
2576       if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2577 	{
2578 	  error ("wrong number of branch edges after unconditional jump"
2579 		 " in bb %i", bb->index);
2580 	  err = 1;
2581 	}
2582       if (n_branch != 1 && any_condjump_p (BB_END (bb))
2583 	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2584 	{
2585 	  error ("wrong amount of branch edges after conditional jump"
2586 		 " in bb %i", bb->index);
2587 	  err = 1;
2588 	}
2589       if (n_abnormal_call && !CALL_P (BB_END (bb)))
2590 	{
2591 	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
2592 	  err = 1;
2593 	}
2594       if (n_sibcall && !CALL_P (BB_END (bb)))
2595 	{
2596 	  error ("sibcall edges for non-call insn in bb %i", bb->index);
2597 	  err = 1;
2598 	}
2599       if (n_abnormal > n_eh
2600 	  && !(CALL_P (BB_END (bb))
2601 	       && n_abnormal == n_abnormal_call + n_sibcall)
2602 	  && (!JUMP_P (BB_END (bb))
2603 	      || any_condjump_p (BB_END (bb))
2604 	      || any_uncondjump_p (BB_END (bb))))
2605 	{
2606 	  error ("abnormal edges for no purpose in bb %i", bb->index);
2607 	  err = 1;
2608 	}
2609     }
2610 
2611   /* If there are partitions, do a sanity check on them: A basic block in
2612      a cold partition cannot dominate a basic block in a hot partition.  */
2613   if (crtl->has_bb_partition && !err
2614       && current_ir_type () == IR_RTL_CFGLAYOUT)
2615     {
2616       vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2617       err = !bbs_to_fix.is_empty ();
2618     }
2619 
2620   /* Clean up.  */
2621   return err;
2622 }
2623 
2624 /* Checks on the instructions within blocks. Currently checks that each
2625    block starts with a basic block note, and that basic block notes and
2626    control flow jumps are not found in the middle of the block.  */
2627 
2628 static int
2629 rtl_verify_bb_insns (void)
2630 {
2631   rtx_insn *x;
2632   int err = 0;
2633   basic_block bb;
2634 
2635   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2636     {
2637       /* Now check the header of basic
2638 	 block.  It ought to contain optional CODE_LABEL followed
2639 	 by NOTE_BASIC_BLOCK.  */
2640       x = BB_HEAD (bb);
2641       if (LABEL_P (x))
2642 	{
2643 	  if (BB_END (bb) == x)
2644 	    {
2645 	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2646 		     bb->index);
2647 	      err = 1;
2648 	    }
2649 
2650 	  x = NEXT_INSN (x);
2651 	}
2652 
2653       if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2654 	{
2655 	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2656 		 bb->index);
2657 	  err = 1;
2658 	}
2659 
2660       if (BB_END (bb) == x)
2661 	/* Do checks for empty blocks here.  */
2662 	;
2663       else
2664 	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2665 	  {
2666 	    if (NOTE_INSN_BASIC_BLOCK_P (x))
2667 	      {
2668 		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2669 		       INSN_UID (x), bb->index);
2670 		err = 1;
2671 	      }
2672 
2673 	    if (x == BB_END (bb))
2674 	      break;
2675 
2676 	    if (control_flow_insn_p (x))
2677 	      {
2678 		error ("in basic block %d:", bb->index);
2679 		fatal_insn ("flow control insn inside a basic block", x);
2680 	      }
2681 	  }
2682     }
2683 
2684   /* Clean up.  */
2685   return err;
2686 }
2687 
2688 /* Verify that block pointers for instructions in basic blocks, headers and
2689    footers are set appropriately.  */
2690 
2691 static int
2692 rtl_verify_bb_pointers (void)
2693 {
2694   int err = 0;
2695   basic_block bb;
2696 
2697   /* Check the general integrity of the basic blocks.  */
2698   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2699     {
2700       rtx_insn *insn;
2701 
2702       if (!(bb->flags & BB_RTL))
2703 	{
2704 	  error ("BB_RTL flag not set for block %d", bb->index);
2705 	  err = 1;
2706 	}
2707 
2708       FOR_BB_INSNS (bb, insn)
2709 	if (BLOCK_FOR_INSN (insn) != bb)
2710 	  {
2711 	    error ("insn %d basic block pointer is %d, should be %d",
2712 		   INSN_UID (insn),
2713 		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2714 		   bb->index);
2715 	    err = 1;
2716 	  }
2717 
2718       for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2719 	if (!BARRIER_P (insn)
2720 	    && BLOCK_FOR_INSN (insn) != NULL)
2721 	  {
2722 	    error ("insn %d in header of bb %d has non-NULL basic block",
2723 		   INSN_UID (insn), bb->index);
2724 	    err = 1;
2725 	  }
2726       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2727 	if (!BARRIER_P (insn)
2728 	    && BLOCK_FOR_INSN (insn) != NULL)
2729 	  {
2730 	    error ("insn %d in footer of bb %d has non-NULL basic block",
2731 		   INSN_UID (insn), bb->index);
2732 	    err = 1;
2733 	  }
2734     }
2735 
2736   /* Clean up.  */
2737   return err;
2738 }
2739 
2740 /* Verify the CFG and RTL consistency common for both underlying RTL and
2741    cfglayout RTL.
2742 
2743    Currently it does following checks:
2744 
2745    - overlapping of basic blocks
2746    - insns with wrong BLOCK_FOR_INSN pointers
2747    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2748    - tails of basic blocks (ensure that boundary is necessary)
2749    - scans body of the basic block for JUMP_INSN, CODE_LABEL
2750      and NOTE_INSN_BASIC_BLOCK
2751    - verify that no fall_thru edge crosses hot/cold partition boundaries
2752    - verify that there are no pending RTL branch predictions
2753    - verify that hot blocks are not dominated by cold blocks
2754 
2755    In future it can be extended check a lot of other stuff as well
2756    (reachability of basic blocks, life information, etc. etc.).  */
2757 
2758 static int
2759 rtl_verify_flow_info_1 (void)
2760 {
2761   int err = 0;
2762 
2763   err |= rtl_verify_bb_pointers ();
2764 
2765   err |= rtl_verify_bb_insns ();
2766 
2767   err |= rtl_verify_edges ();
2768 
2769   return err;
2770 }
2771 
2772 /* Walk the instruction chain and verify that bb head/end pointers
2773   are correct, and that instructions are in exactly one bb and have
2774   correct block pointers.  */
2775 
2776 static int
2777 rtl_verify_bb_insn_chain (void)
2778 {
2779   basic_block bb;
2780   int err = 0;
2781   rtx_insn *x;
2782   rtx_insn *last_head = get_last_insn ();
2783   basic_block *bb_info;
2784   const int max_uid = get_max_uid ();
2785 
2786   bb_info = XCNEWVEC (basic_block, max_uid);
2787 
2788   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2789     {
2790       rtx_insn *head = BB_HEAD (bb);
2791       rtx_insn *end = BB_END (bb);
2792 
2793       for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2794 	{
2795 	  /* Verify the end of the basic block is in the INSN chain.  */
2796 	  if (x == end)
2797 	    break;
2798 
2799             /* And that the code outside of basic blocks has NULL bb field.  */
2800           if (!BARRIER_P (x)
2801               && BLOCK_FOR_INSN (x) != NULL)
2802             {
2803               error ("insn %d outside of basic blocks has non-NULL bb field",
2804                      INSN_UID (x));
2805               err = 1;
2806             }
2807 	}
2808 
2809       if (!x)
2810 	{
2811 	  error ("end insn %d for block %d not found in the insn stream",
2812 		 INSN_UID (end), bb->index);
2813 	  err = 1;
2814 	}
2815 
2816       /* Work backwards from the end to the head of the basic block
2817 	 to verify the head is in the RTL chain.  */
2818       for (; x != NULL_RTX; x = PREV_INSN (x))
2819 	{
2820 	  /* While walking over the insn chain, verify insns appear
2821 	     in only one basic block.  */
2822 	  if (bb_info[INSN_UID (x)] != NULL)
2823 	    {
2824 	      error ("insn %d is in multiple basic blocks (%d and %d)",
2825 		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2826 	      err = 1;
2827 	    }
2828 
2829 	  bb_info[INSN_UID (x)] = bb;
2830 
2831 	  if (x == head)
2832 	    break;
2833 	}
2834       if (!x)
2835 	{
2836 	  error ("head insn %d for block %d not found in the insn stream",
2837 		 INSN_UID (head), bb->index);
2838 	  err = 1;
2839 	}
2840 
2841       last_head = PREV_INSN (x);
2842     }
2843 
2844   for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2845     {
2846       /* Check that the code before the first basic block has NULL
2847 	 bb field.  */
2848       if (!BARRIER_P (x)
2849 	  && BLOCK_FOR_INSN (x) != NULL)
2850 	{
2851 	  error ("insn %d outside of basic blocks has non-NULL bb field",
2852 		 INSN_UID (x));
2853 	  err = 1;
2854 	}
2855     }
2856   free (bb_info);
2857 
2858   return err;
2859 }
2860 
2861 /* Verify that fallthru edges point to adjacent blocks in layout order and
2862    that barriers exist after non-fallthru blocks.  */
2863 
2864 static int
2865 rtl_verify_fallthru (void)
2866 {
2867   basic_block bb;
2868   int err = 0;
2869 
2870   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2871     {
2872       edge e;
2873 
2874       e = find_fallthru_edge (bb->succs);
2875       if (!e)
2876 	{
2877 	  rtx_insn *insn;
2878 
2879 	  /* Ensure existence of barrier in BB with no fallthru edges.  */
2880 	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2881 	    {
2882 	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2883 		{
2884 		  error ("missing barrier after block %i", bb->index);
2885 		  err = 1;
2886 		  break;
2887 		}
2888 	      if (BARRIER_P (insn))
2889 		break;
2890 	    }
2891 	}
2892       else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2893 	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2894 	{
2895 	  rtx_insn *insn;
2896 
2897 	  if (e->src->next_bb != e->dest)
2898 	    {
2899 	      error
2900 		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2901 		 e->src->index, e->dest->index);
2902 	      err = 1;
2903 	    }
2904 	  else
2905 	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2906 		 insn = NEXT_INSN (insn))
2907 	      if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
2908 		{
2909 		  error ("verify_flow_info: Incorrect fallthru %i->%i",
2910 			 e->src->index, e->dest->index);
2911 		  fatal_insn ("wrong insn in the fallthru edge", insn);
2912 		  err = 1;
2913 		}
2914 	}
2915     }
2916 
2917    return err;
2918 }
2919 
2920 /* Verify that blocks are laid out in consecutive order. While walking the
2921    instructions, verify that all expected instructions are inside the basic
2922    blocks, and that all returns are followed by barriers.  */
2923 
2924 static int
2925 rtl_verify_bb_layout (void)
2926 {
2927   basic_block bb;
2928   int err = 0;
2929   rtx_insn *x, *y;
2930   int num_bb_notes;
2931   rtx_insn * const rtx_first = get_insns ();
2932   basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
2933 
2934   num_bb_notes = 0;
2935   last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
2936 
2937   for (x = rtx_first; x; x = NEXT_INSN (x))
2938     {
2939       if (NOTE_INSN_BASIC_BLOCK_P (x))
2940 	{
2941 	  bb = NOTE_BASIC_BLOCK (x);
2942 
2943 	  num_bb_notes++;
2944 	  if (bb != last_bb_seen->next_bb)
2945 	    internal_error ("basic blocks not laid down consecutively");
2946 
2947 	  curr_bb = last_bb_seen = bb;
2948 	}
2949 
2950       if (!curr_bb)
2951 	{
2952 	  switch (GET_CODE (x))
2953 	    {
2954 	    case BARRIER:
2955 	    case NOTE:
2956 	      break;
2957 
2958 	    case CODE_LABEL:
2959 	      /* An ADDR_VEC is placed outside any basic block.  */
2960 	      if (NEXT_INSN (x)
2961 		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2962 		x = NEXT_INSN (x);
2963 
2964 	      /* But in any case, non-deletable labels can appear anywhere.  */
2965 	      break;
2966 
2967 	    default:
2968 	      fatal_insn ("insn outside basic block", x);
2969 	    }
2970 	}
2971 
2972       if (JUMP_P (x)
2973 	  && returnjump_p (x) && ! condjump_p (x)
2974 	  && ! ((y = next_nonnote_nondebug_insn (x))
2975 		&& BARRIER_P (y)))
2976 	    fatal_insn ("return not followed by barrier", x);
2977 
2978       if (curr_bb && x == BB_END (curr_bb))
2979 	curr_bb = NULL;
2980     }
2981 
2982   if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
2983     internal_error
2984       ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2985        num_bb_notes, n_basic_blocks_for_fn (cfun));
2986 
2987    return err;
2988 }
2989 
2990 /* Verify the CFG and RTL consistency common for both underlying RTL and
2991    cfglayout RTL, plus consistency checks specific to linearized RTL mode.
2992 
2993    Currently it does following checks:
2994    - all checks of rtl_verify_flow_info_1
2995    - test head/end pointers
2996    - check that blocks are laid out in consecutive order
2997    - check that all insns are in the basic blocks
2998      (except the switch handling code, barriers and notes)
2999    - check that all returns are followed by barriers
3000    - check that all fallthru edge points to the adjacent blocks
3001    - verify that there is a single hot/cold partition boundary after bbro  */
3002 
3003 static int
3004 rtl_verify_flow_info (void)
3005 {
3006   int err = 0;
3007 
3008   err |= rtl_verify_flow_info_1 ();
3009 
3010   err |= rtl_verify_bb_insn_chain ();
3011 
3012   err |= rtl_verify_fallthru ();
3013 
3014   err |= rtl_verify_bb_layout ();
3015 
3016   err |= verify_hot_cold_block_grouping ();
3017 
3018   return err;
3019 }
3020 
3021 /* Assume that the preceding pass has possibly eliminated jump instructions
3022    or converted the unconditional jumps.  Eliminate the edges from CFG.
3023    Return true if any edges are eliminated.  */
3024 
3025 bool
3026 purge_dead_edges (basic_block bb)
3027 {
3028   edge e;
3029   rtx_insn *insn = BB_END (bb);
3030   rtx note;
3031   bool purged = false;
3032   bool found;
3033   edge_iterator ei;
3034 
3035   if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
3036     do
3037       insn = PREV_INSN (insn);
3038     while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3039 
3040   /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
3041   if (NONJUMP_INSN_P (insn)
3042       && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3043     {
3044       rtx eqnote;
3045 
3046       if (! may_trap_p (PATTERN (insn))
3047 	  || ((eqnote = find_reg_equal_equiv_note (insn))
3048 	      && ! may_trap_p (XEXP (eqnote, 0))))
3049 	remove_note (insn, note);
3050     }
3051 
3052   /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
3053   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3054     {
3055       bool remove = false;
3056 
3057       /* There are three types of edges we need to handle correctly here: EH
3058 	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
3059 	 latter can appear when nonlocal gotos are used.  */
3060       if (e->flags & EDGE_ABNORMAL_CALL)
3061 	{
3062 	  if (!CALL_P (insn))
3063 	    remove = true;
3064 	  else if (can_nonlocal_goto (insn))
3065 	    ;
3066 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3067 	    ;
3068 	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3069 	    ;
3070 	  else
3071 	    remove = true;
3072 	}
3073       else if (e->flags & EDGE_EH)
3074 	remove = !can_throw_internal (insn);
3075 
3076       if (remove)
3077 	{
3078 	  remove_edge (e);
3079 	  df_set_bb_dirty (bb);
3080 	  purged = true;
3081 	}
3082       else
3083 	ei_next (&ei);
3084     }
3085 
3086   if (JUMP_P (insn))
3087     {
3088       rtx note;
3089       edge b,f;
3090       edge_iterator ei;
3091 
3092       /* We do care only about conditional jumps and simplejumps.  */
3093       if (!any_condjump_p (insn)
3094 	  && !returnjump_p (insn)
3095 	  && !simplejump_p (insn))
3096 	return purged;
3097 
3098       /* Branch probability/prediction notes are defined only for
3099 	 condjumps.  We've possibly turned condjump into simplejump.  */
3100       if (simplejump_p (insn))
3101 	{
3102 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
3103 	  if (note)
3104 	    remove_note (insn, note);
3105 	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3106 	    remove_note (insn, note);
3107 	}
3108 
3109       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3110 	{
3111 	  /* Avoid abnormal flags to leak from computed jumps turned
3112 	     into simplejumps.  */
3113 
3114 	  e->flags &= ~EDGE_ABNORMAL;
3115 
3116 	  /* See if this edge is one we should keep.  */
3117 	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3118 	    /* A conditional jump can fall through into the next
3119 	       block, so we should keep the edge.  */
3120 	    {
3121 	      ei_next (&ei);
3122 	      continue;
3123 	    }
3124 	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3125 		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3126 	    /* If the destination block is the target of the jump,
3127 	       keep the edge.  */
3128 	    {
3129 	      ei_next (&ei);
3130 	      continue;
3131 	    }
3132 	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3133 		   && returnjump_p (insn))
3134 	    /* If the destination block is the exit block, and this
3135 	       instruction is a return, then keep the edge.  */
3136 	    {
3137 	      ei_next (&ei);
3138 	      continue;
3139 	    }
3140 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3141 	    /* Keep the edges that correspond to exceptions thrown by
3142 	       this instruction and rematerialize the EDGE_ABNORMAL
3143 	       flag we just cleared above.  */
3144 	    {
3145 	      e->flags |= EDGE_ABNORMAL;
3146 	      ei_next (&ei);
3147 	      continue;
3148 	    }
3149 
3150 	  /* We do not need this edge.  */
3151 	  df_set_bb_dirty (bb);
3152 	  purged = true;
3153 	  remove_edge (e);
3154 	}
3155 
3156       if (EDGE_COUNT (bb->succs) == 0 || !purged)
3157 	return purged;
3158 
3159       if (dump_file)
3160 	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3161 
3162       if (!optimize)
3163 	return purged;
3164 
3165       /* Redistribute probabilities.  */
3166       if (single_succ_p (bb))
3167 	{
3168 	  single_succ_edge (bb)->probability = profile_probability::always ();
3169 	}
3170       else
3171 	{
3172 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
3173 	  if (!note)
3174 	    return purged;
3175 
3176 	  b = BRANCH_EDGE (bb);
3177 	  f = FALLTHRU_EDGE (bb);
3178 	  b->probability = profile_probability::from_reg_br_prob_note
3179 					 (XINT (note, 0));
3180 	  f->probability = b->probability.invert ();
3181 	}
3182 
3183       return purged;
3184     }
3185   else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3186     {
3187       /* First, there should not be any EH or ABCALL edges resulting
3188 	 from non-local gotos and the like.  If there were, we shouldn't
3189 	 have created the sibcall in the first place.  Second, there
3190 	 should of course never have been a fallthru edge.  */
3191       gcc_assert (single_succ_p (bb));
3192       gcc_assert (single_succ_edge (bb)->flags
3193 		  == (EDGE_SIBCALL | EDGE_ABNORMAL));
3194 
3195       return 0;
3196     }
3197 
3198   /* If we don't see a jump insn, we don't know exactly why the block would
3199      have been broken at this point.  Look for a simple, non-fallthru edge,
3200      as these are only created by conditional branches.  If we find such an
3201      edge we know that there used to be a jump here and can then safely
3202      remove all non-fallthru edges.  */
3203   found = false;
3204   FOR_EACH_EDGE (e, ei, bb->succs)
3205     if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3206       {
3207 	found = true;
3208 	break;
3209       }
3210 
3211   if (!found)
3212     return purged;
3213 
3214   /* Remove all but the fake and fallthru edges.  The fake edge may be
3215      the only successor for this block in the case of noreturn
3216      calls.  */
3217   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3218     {
3219       if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3220 	{
3221 	  df_set_bb_dirty (bb);
3222 	  remove_edge (e);
3223 	  purged = true;
3224 	}
3225       else
3226 	ei_next (&ei);
3227     }
3228 
3229   gcc_assert (single_succ_p (bb));
3230 
3231   single_succ_edge (bb)->probability = profile_probability::always ();
3232 
3233   if (dump_file)
3234     fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3235 	     bb->index);
3236   return purged;
3237 }
3238 
3239 /* Search all basic blocks for potentially dead edges and purge them.  Return
3240    true if some edge has been eliminated.  */
3241 
3242 bool
3243 purge_all_dead_edges (void)
3244 {
3245   int purged = false;
3246   basic_block bb;
3247 
3248   FOR_EACH_BB_FN (bb, cfun)
3249     {
3250       bool purged_here = purge_dead_edges (bb);
3251 
3252       purged |= purged_here;
3253     }
3254 
3255   return purged;
3256 }
3257 
3258 /* This is used by a few passes that emit some instructions after abnormal
3259    calls, moving the basic block's end, while they in fact do want to emit
3260    them on the fallthru edge.  Look for abnormal call edges, find backward
3261    the call in the block and insert the instructions on the edge instead.
3262 
3263    Similarly, handle instructions throwing exceptions internally.
3264 
3265    Return true when instructions have been found and inserted on edges.  */
3266 
3267 bool
3268 fixup_abnormal_edges (void)
3269 {
3270   bool inserted = false;
3271   basic_block bb;
3272 
3273   FOR_EACH_BB_FN (bb, cfun)
3274     {
3275       edge e;
3276       edge_iterator ei;
3277 
3278       /* Look for cases we are interested in - calls or instructions causing
3279          exceptions.  */
3280       FOR_EACH_EDGE (e, ei, bb->succs)
3281 	if ((e->flags & EDGE_ABNORMAL_CALL)
3282 	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3283 		== (EDGE_ABNORMAL | EDGE_EH)))
3284 	  break;
3285 
3286       if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3287 	{
3288 	  rtx_insn *insn;
3289 
3290 	  /* Get past the new insns generated.  Allow notes, as the insns
3291 	     may be already deleted.  */
3292 	  insn = BB_END (bb);
3293 	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3294 		 && !can_throw_internal (insn)
3295 		 && insn != BB_HEAD (bb))
3296 	    insn = PREV_INSN (insn);
3297 
3298 	  if (CALL_P (insn) || can_throw_internal (insn))
3299 	    {
3300 	      rtx_insn *stop, *next;
3301 
3302 	      e = find_fallthru_edge (bb->succs);
3303 
3304 	      stop = NEXT_INSN (BB_END (bb));
3305 	      BB_END (bb) = insn;
3306 
3307 	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3308 		{
3309 		  next = NEXT_INSN (insn);
3310 		  if (INSN_P (insn))
3311 		    {
3312 		      delete_insn (insn);
3313 
3314 		      /* Sometimes there's still the return value USE.
3315 			 If it's placed after a trapping call (i.e. that
3316 			 call is the last insn anyway), we have no fallthru
3317 			 edge.  Simply delete this use and don't try to insert
3318 			 on the non-existent edge.  */
3319 		      if (GET_CODE (PATTERN (insn)) != USE)
3320 			{
3321 			  /* We're not deleting it, we're moving it.  */
3322 			  insn->set_undeleted ();
3323 			  SET_PREV_INSN (insn) = NULL_RTX;
3324 			  SET_NEXT_INSN (insn) = NULL_RTX;
3325 
3326 			  insert_insn_on_edge (insn, e);
3327 			  inserted = true;
3328 			}
3329 		    }
3330 		  else if (!BARRIER_P (insn))
3331 		    set_block_for_insn (insn, NULL);
3332 		}
3333 	    }
3334 
3335 	  /* It may be that we don't find any trapping insn.  In this
3336 	     case we discovered quite late that the insn that had been
3337 	     marked as can_throw_internal in fact couldn't trap at all.
3338 	     So we should in fact delete the EH edges out of the block.  */
3339 	  else
3340 	    purge_dead_edges (bb);
3341 	}
3342     }
3343 
3344   return inserted;
3345 }
3346 
3347 /* Cut the insns from FIRST to LAST out of the insns stream.  */
3348 
3349 rtx_insn *
3350 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3351 {
3352   rtx_insn *prevfirst = PREV_INSN (first);
3353   rtx_insn *nextlast = NEXT_INSN (last);
3354 
3355   SET_PREV_INSN (first) = NULL;
3356   SET_NEXT_INSN (last) = NULL;
3357   if (prevfirst)
3358     SET_NEXT_INSN (prevfirst) = nextlast;
3359   if (nextlast)
3360     SET_PREV_INSN (nextlast) = prevfirst;
3361   else
3362     set_last_insn (prevfirst);
3363   if (!prevfirst)
3364     set_first_insn (nextlast);
3365   return first;
3366 }
3367 
3368 /* Skip over inter-block insns occurring after BB which are typically
3369    associated with BB (e.g., barriers). If there are any such insns,
3370    we return the last one. Otherwise, we return the end of BB.  */
3371 
3372 static rtx_insn *
3373 skip_insns_after_block (basic_block bb)
3374 {
3375   rtx_insn *insn, *last_insn, *next_head, *prev;
3376 
3377   next_head = NULL;
3378   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3379     next_head = BB_HEAD (bb->next_bb);
3380 
3381   for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3382     {
3383       if (insn == next_head)
3384 	break;
3385 
3386       switch (GET_CODE (insn))
3387 	{
3388 	case BARRIER:
3389 	  last_insn = insn;
3390 	  continue;
3391 
3392 	case NOTE:
3393 	  switch (NOTE_KIND (insn))
3394 	    {
3395 	    case NOTE_INSN_BLOCK_END:
3396 	      gcc_unreachable ();
3397 	      continue;
3398 	    default:
3399 	      continue;
3400 	      break;
3401 	    }
3402 	  break;
3403 
3404 	case CODE_LABEL:
3405 	  if (NEXT_INSN (insn)
3406 	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3407 	    {
3408 	      insn = NEXT_INSN (insn);
3409 	      last_insn = insn;
3410 	      continue;
3411 	    }
3412 	  break;
3413 
3414 	default:
3415 	  break;
3416 	}
3417 
3418       break;
3419     }
3420 
3421   /* It is possible to hit contradictory sequence.  For instance:
3422 
3423      jump_insn
3424      NOTE_INSN_BLOCK_BEG
3425      barrier
3426 
3427      Where barrier belongs to jump_insn, but the note does not.  This can be
3428      created by removing the basic block originally following
3429      NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */
3430 
3431   for (insn = last_insn; insn != BB_END (bb); insn = prev)
3432     {
3433       prev = PREV_INSN (insn);
3434       if (NOTE_P (insn))
3435 	switch (NOTE_KIND (insn))
3436 	  {
3437 	  case NOTE_INSN_BLOCK_END:
3438 	    gcc_unreachable ();
3439 	    break;
3440 	  case NOTE_INSN_DELETED:
3441 	  case NOTE_INSN_DELETED_LABEL:
3442 	  case NOTE_INSN_DELETED_DEBUG_LABEL:
3443 	    continue;
3444 	  default:
3445 	    reorder_insns (insn, insn, last_insn);
3446 	  }
3447     }
3448 
3449   return last_insn;
3450 }
3451 
3452 /* Locate or create a label for a given basic block.  */
3453 
3454 static rtx_insn *
3455 label_for_bb (basic_block bb)
3456 {
3457   rtx_insn *label = BB_HEAD (bb);
3458 
3459   if (!LABEL_P (label))
3460     {
3461       if (dump_file)
3462 	fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3463 
3464       label = block_label (bb);
3465     }
3466 
3467   return label;
3468 }
3469 
3470 /* Locate the effective beginning and end of the insn chain for each
3471    block, as defined by skip_insns_after_block above.  */
3472 
3473 static void
3474 record_effective_endpoints (void)
3475 {
3476   rtx_insn *next_insn;
3477   basic_block bb;
3478   rtx_insn *insn;
3479 
3480   for (insn = get_insns ();
3481        insn
3482        && NOTE_P (insn)
3483        && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3484        insn = NEXT_INSN (insn))
3485     continue;
3486   /* No basic blocks at all?  */
3487   gcc_assert (insn);
3488 
3489   if (PREV_INSN (insn))
3490     cfg_layout_function_header =
3491 	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
3492   else
3493     cfg_layout_function_header = NULL;
3494 
3495   next_insn = get_insns ();
3496   FOR_EACH_BB_FN (bb, cfun)
3497     {
3498       rtx_insn *end;
3499 
3500       if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3501 	BB_HEADER (bb) = unlink_insn_chain (next_insn,
3502 						PREV_INSN (BB_HEAD (bb)));
3503       end = skip_insns_after_block (bb);
3504       if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3505 	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3506       next_insn = NEXT_INSN (BB_END (bb));
3507     }
3508 
3509   cfg_layout_function_footer = next_insn;
3510   if (cfg_layout_function_footer)
3511     cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3512 }
3513 
3514 namespace {
3515 
3516 const pass_data pass_data_into_cfg_layout_mode =
3517 {
3518   RTL_PASS, /* type */
3519   "into_cfglayout", /* name */
3520   OPTGROUP_NONE, /* optinfo_flags */
3521   TV_CFG, /* tv_id */
3522   0, /* properties_required */
3523   PROP_cfglayout, /* properties_provided */
3524   0, /* properties_destroyed */
3525   0, /* todo_flags_start */
3526   0, /* todo_flags_finish */
3527 };
3528 
3529 class pass_into_cfg_layout_mode : public rtl_opt_pass
3530 {
3531 public:
3532   pass_into_cfg_layout_mode (gcc::context *ctxt)
3533     : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3534   {}
3535 
3536   /* opt_pass methods: */
3537   virtual unsigned int execute (function *)
3538     {
3539       cfg_layout_initialize (0);
3540       return 0;
3541     }
3542 
3543 }; // class pass_into_cfg_layout_mode
3544 
3545 } // anon namespace
3546 
3547 rtl_opt_pass *
3548 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3549 {
3550   return new pass_into_cfg_layout_mode (ctxt);
3551 }
3552 
3553 namespace {
3554 
3555 const pass_data pass_data_outof_cfg_layout_mode =
3556 {
3557   RTL_PASS, /* type */
3558   "outof_cfglayout", /* name */
3559   OPTGROUP_NONE, /* optinfo_flags */
3560   TV_CFG, /* tv_id */
3561   0, /* properties_required */
3562   0, /* properties_provided */
3563   PROP_cfglayout, /* properties_destroyed */
3564   0, /* todo_flags_start */
3565   0, /* todo_flags_finish */
3566 };
3567 
3568 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3569 {
3570 public:
3571   pass_outof_cfg_layout_mode (gcc::context *ctxt)
3572     : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3573   {}
3574 
3575   /* opt_pass methods: */
3576   virtual unsigned int execute (function *);
3577 
3578 }; // class pass_outof_cfg_layout_mode
3579 
3580 unsigned int
3581 pass_outof_cfg_layout_mode::execute (function *fun)
3582 {
3583   basic_block bb;
3584 
3585   FOR_EACH_BB_FN (bb, fun)
3586     if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3587       bb->aux = bb->next_bb;
3588 
3589   cfg_layout_finalize ();
3590 
3591   return 0;
3592 }
3593 
3594 } // anon namespace
3595 
3596 rtl_opt_pass *
3597 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3598 {
3599   return new pass_outof_cfg_layout_mode (ctxt);
3600 }
3601 
3602 
3603 /* Link the basic blocks in the correct order, compacting the basic
3604    block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
3605    function also clears the basic block header and footer fields.
3606 
3607    This function is usually called after a pass (e.g. tracer) finishes
3608    some transformations while in cfglayout mode.  The required sequence
3609    of the basic blocks is in a linked list along the bb->aux field.
3610    This functions re-links the basic block prev_bb and next_bb pointers
3611    accordingly, and it compacts and renumbers the blocks.
3612 
3613    FIXME: This currently works only for RTL, but the only RTL-specific
3614    bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
3615    to GIMPLE a long time ago, but it doesn't relink the basic block
3616    chain.  It could do that (to give better initial RTL) if this function
3617    is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */
3618 
3619 void
3620 relink_block_chain (bool stay_in_cfglayout_mode)
3621 {
3622   basic_block bb, prev_bb;
3623   int index;
3624 
3625   /* Maybe dump the re-ordered sequence.  */
3626   if (dump_file)
3627     {
3628       fprintf (dump_file, "Reordered sequence:\n");
3629       for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3630 	   NUM_FIXED_BLOCKS;
3631 	   bb;
3632 	   bb = (basic_block) bb->aux, index++)
3633 	{
3634 	  fprintf (dump_file, " %i ", index);
3635 	  if (get_bb_original (bb))
3636 	    fprintf (dump_file, "duplicate of %i ",
3637 		     get_bb_original (bb)->index);
3638 	  else if (forwarder_block_p (bb)
3639 		   && !LABEL_P (BB_HEAD (bb)))
3640 	    fprintf (dump_file, "compensation ");
3641 	  else
3642 	    fprintf (dump_file, "bb %i ", bb->index);
3643 	}
3644     }
3645 
3646   /* Now reorder the blocks.  */
3647   prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3648   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3649   for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3650     {
3651       bb->prev_bb = prev_bb;
3652       prev_bb->next_bb = bb;
3653     }
3654   prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3655   EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3656 
3657   /* Then, clean up the aux fields.  */
3658   FOR_ALL_BB_FN (bb, cfun)
3659     {
3660       bb->aux = NULL;
3661       if (!stay_in_cfglayout_mode)
3662 	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3663     }
3664 
3665   /* Maybe reset the original copy tables, they are not valid anymore
3666      when we renumber the basic blocks in compact_blocks.  If we are
3667      are going out of cfglayout mode, don't re-allocate the tables.  */
3668   if (original_copy_tables_initialized_p ())
3669     free_original_copy_tables ();
3670   if (stay_in_cfglayout_mode)
3671     initialize_original_copy_tables ();
3672 
3673   /* Finally, put basic_block_info in the new order.  */
3674   compact_blocks ();
3675 }
3676 
3677 
3678 /* Given a reorder chain, rearrange the code to match.  */
3679 
3680 static void
3681 fixup_reorder_chain (void)
3682 {
3683   basic_block bb;
3684   rtx_insn *insn = NULL;
3685 
3686   if (cfg_layout_function_header)
3687     {
3688       set_first_insn (cfg_layout_function_header);
3689       insn = cfg_layout_function_header;
3690       while (NEXT_INSN (insn))
3691 	insn = NEXT_INSN (insn);
3692     }
3693 
3694   /* First do the bulk reordering -- rechain the blocks without regard to
3695      the needed changes to jumps and labels.  */
3696 
3697   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3698        bb->aux)
3699     {
3700       if (BB_HEADER (bb))
3701 	{
3702 	  if (insn)
3703 	    SET_NEXT_INSN (insn) = BB_HEADER (bb);
3704 	  else
3705 	    set_first_insn (BB_HEADER (bb));
3706 	  SET_PREV_INSN (BB_HEADER (bb)) = insn;
3707 	  insn = BB_HEADER (bb);
3708 	  while (NEXT_INSN (insn))
3709 	    insn = NEXT_INSN (insn);
3710 	}
3711       if (insn)
3712 	SET_NEXT_INSN (insn) = BB_HEAD (bb);
3713       else
3714 	set_first_insn (BB_HEAD (bb));
3715       SET_PREV_INSN (BB_HEAD (bb)) = insn;
3716       insn = BB_END (bb);
3717       if (BB_FOOTER (bb))
3718 	{
3719 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3720 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3721 	  while (NEXT_INSN (insn))
3722 	    insn = NEXT_INSN (insn);
3723 	}
3724     }
3725 
3726   SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3727   if (cfg_layout_function_footer)
3728     SET_PREV_INSN (cfg_layout_function_footer) = insn;
3729 
3730   while (NEXT_INSN (insn))
3731     insn = NEXT_INSN (insn);
3732 
3733   set_last_insn (insn);
3734   if (flag_checking)
3735     verify_insn_chain ();
3736 
3737   /* Now add jumps and labels as needed to match the blocks new
3738      outgoing edges.  */
3739 
3740   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3741        bb->aux)
3742     {
3743       edge e_fall, e_taken, e;
3744       rtx_insn *bb_end_insn;
3745       rtx ret_label = NULL_RTX;
3746       basic_block nb;
3747       edge_iterator ei;
3748 
3749       if (EDGE_COUNT (bb->succs) == 0)
3750 	continue;
3751 
3752       /* Find the old fallthru edge, and another non-EH edge for
3753 	 a taken jump.  */
3754       e_taken = e_fall = NULL;
3755 
3756       FOR_EACH_EDGE (e, ei, bb->succs)
3757 	if (e->flags & EDGE_FALLTHRU)
3758 	  e_fall = e;
3759 	else if (! (e->flags & EDGE_EH))
3760 	  e_taken = e;
3761 
3762       bb_end_insn = BB_END (bb);
3763       if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
3764 	{
3765 	  ret_label = JUMP_LABEL (bb_end_jump);
3766 	  if (any_condjump_p (bb_end_jump))
3767 	    {
3768 	      /* This might happen if the conditional jump has side
3769 		 effects and could therefore not be optimized away.
3770 		 Make the basic block to end with a barrier in order
3771 		 to prevent rtl_verify_flow_info from complaining.  */
3772 	      if (!e_fall)
3773 		{
3774 		  gcc_assert (!onlyjump_p (bb_end_jump)
3775 			      || returnjump_p (bb_end_jump)
3776                               || (e_taken->flags & EDGE_CROSSING));
3777 		  emit_barrier_after (bb_end_jump);
3778 		  continue;
3779 		}
3780 
3781 	      /* If the old fallthru is still next, nothing to do.  */
3782 	      if (bb->aux == e_fall->dest
3783 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3784 		continue;
3785 
3786 	      /* The degenerated case of conditional jump jumping to the next
3787 		 instruction can happen for jumps with side effects.  We need
3788 		 to construct a forwarder block and this will be done just
3789 		 fine by force_nonfallthru below.  */
3790 	      if (!e_taken)
3791 		;
3792 
3793 	      /* There is another special case: if *neither* block is next,
3794 		 such as happens at the very end of a function, then we'll
3795 		 need to add a new unconditional jump.  Choose the taken
3796 		 edge based on known or assumed probability.  */
3797 	      else if (bb->aux != e_taken->dest)
3798 		{
3799 		  rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
3800 
3801 		  if (note
3802 		      && profile_probability::from_reg_br_prob_note
3803 				 (XINT (note, 0)) < profile_probability::even ()
3804 		      && invert_jump (bb_end_jump,
3805 				      (e_fall->dest
3806 				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
3807 				       ? NULL_RTX
3808 				       : label_for_bb (e_fall->dest)), 0))
3809 		    {
3810 		      e_fall->flags &= ~EDGE_FALLTHRU;
3811 		      gcc_checking_assert (could_fall_through
3812 					   (e_taken->src, e_taken->dest));
3813 		      e_taken->flags |= EDGE_FALLTHRU;
3814 		      update_br_prob_note (bb);
3815 		      e = e_fall, e_fall = e_taken, e_taken = e;
3816 		    }
3817 		}
3818 
3819 	      /* If the "jumping" edge is a crossing edge, and the fall
3820 		 through edge is non-crossing, leave things as they are.  */
3821 	      else if ((e_taken->flags & EDGE_CROSSING)
3822 		       && !(e_fall->flags & EDGE_CROSSING))
3823 		continue;
3824 
3825 	      /* Otherwise we can try to invert the jump.  This will
3826 		 basically never fail, however, keep up the pretense.  */
3827 	      else if (invert_jump (bb_end_jump,
3828 				    (e_fall->dest
3829 				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
3830 				     ? NULL_RTX
3831 				     : label_for_bb (e_fall->dest)), 0))
3832 		{
3833 		  e_fall->flags &= ~EDGE_FALLTHRU;
3834 		  gcc_checking_assert (could_fall_through
3835 				       (e_taken->src, e_taken->dest));
3836 		  e_taken->flags |= EDGE_FALLTHRU;
3837 		  update_br_prob_note (bb);
3838 		  if (LABEL_NUSES (ret_label) == 0
3839 		      && single_pred_p (e_taken->dest))
3840 		    delete_insn (as_a<rtx_insn *> (ret_label));
3841 		  continue;
3842 		}
3843 	    }
3844 	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3845 	    {
3846 	      /* If the old fallthru is still next or if
3847 		 asm goto doesn't have a fallthru (e.g. when followed by
3848 		 __builtin_unreachable ()), nothing to do.  */
3849 	      if (! e_fall
3850 		  || bb->aux == e_fall->dest
3851 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3852 		continue;
3853 
3854 	      /* Otherwise we'll have to use the fallthru fixup below.  */
3855 	    }
3856 	  else
3857 	    {
3858 	      /* Otherwise we have some return, switch or computed
3859 		 jump.  In the 99% case, there should not have been a
3860 		 fallthru edge.  */
3861 	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3862 	      continue;
3863 	    }
3864 	}
3865       else
3866 	{
3867 	  /* No fallthru implies a noreturn function with EH edges, or
3868 	     something similarly bizarre.  In any case, we don't need to
3869 	     do anything.  */
3870 	  if (! e_fall)
3871 	    continue;
3872 
3873 	  /* If the fallthru block is still next, nothing to do.  */
3874 	  if (bb->aux == e_fall->dest)
3875 	    continue;
3876 
3877 	  /* A fallthru to exit block.  */
3878 	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3879 	    continue;
3880 	}
3881 
3882       /* We got here if we need to add a new jump insn.
3883 	 Note force_nonfallthru can delete E_FALL and thus we have to
3884 	 save E_FALL->src prior to the call to force_nonfallthru.  */
3885       nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3886       if (nb)
3887 	{
3888 	  nb->aux = bb->aux;
3889 	  bb->aux = nb;
3890 	  /* Don't process this new block.  */
3891 	  bb = nb;
3892 	}
3893     }
3894 
3895   relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3896 
3897   /* Annoying special case - jump around dead jumptables left in the code.  */
3898   FOR_EACH_BB_FN (bb, cfun)
3899     {
3900       edge e = find_fallthru_edge (bb->succs);
3901 
3902       if (e && !can_fallthru (e->src, e->dest))
3903 	force_nonfallthru (e);
3904     }
3905 
3906   /* Ensure goto_locus from edges has some instructions with that locus
3907      in RTL.  */
3908   if (!optimize)
3909     FOR_EACH_BB_FN (bb, cfun)
3910       {
3911         edge e;
3912         edge_iterator ei;
3913 
3914         FOR_EACH_EDGE (e, ei, bb->succs)
3915 	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
3916 	      && !(e->flags & EDGE_ABNORMAL))
3917 	    {
3918 	      edge e2;
3919 	      edge_iterator ei2;
3920 	      basic_block dest, nb;
3921 	      rtx_insn *end;
3922 
3923 	      insn = BB_END (e->src);
3924 	      end = PREV_INSN (BB_HEAD (e->src));
3925 	      while (insn != end
3926 		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
3927 		insn = PREV_INSN (insn);
3928 	      if (insn != end
3929 		  && INSN_LOCATION (insn) == e->goto_locus)
3930 		continue;
3931 	      if (simplejump_p (BB_END (e->src))
3932 		  && !INSN_HAS_LOCATION (BB_END (e->src)))
3933 		{
3934 		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
3935 		  continue;
3936 		}
3937 	      dest = e->dest;
3938 	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3939 		{
3940 		  /* Non-fallthru edges to the exit block cannot be split.  */
3941 		  if (!(e->flags & EDGE_FALLTHRU))
3942 		    continue;
3943 		}
3944 	      else
3945 		{
3946 		  insn = BB_HEAD (dest);
3947 		  end = NEXT_INSN (BB_END (dest));
3948 		  while (insn != end && !NONDEBUG_INSN_P (insn))
3949 		    insn = NEXT_INSN (insn);
3950 		  if (insn != end && INSN_HAS_LOCATION (insn)
3951 		      && INSN_LOCATION (insn) == e->goto_locus)
3952 		    continue;
3953 		}
3954 	      nb = split_edge (e);
3955 	      if (!INSN_P (BB_END (nb)))
3956 		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
3957 							 nb);
3958 	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;
3959 
3960 	      /* If there are other incoming edges to the destination block
3961 		 with the same goto locus, redirect them to the new block as
3962 		 well, this can prevent other such blocks from being created
3963 		 in subsequent iterations of the loop.  */
3964 	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
3965 		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
3966 		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
3967 		    && e->goto_locus == e2->goto_locus)
3968 		  redirect_edge_and_branch (e2, nb);
3969 		else
3970 		  ei_next (&ei2);
3971 	    }
3972       }
3973 }
3974 
3975 /* Perform sanity checks on the insn chain.
3976    1. Check that next/prev pointers are consistent in both the forward and
3977       reverse direction.
3978    2. Count insns in chain, going both directions, and check if equal.
3979    3. Check that get_last_insn () returns the actual end of chain.  */
3980 
3981 DEBUG_FUNCTION void
3982 verify_insn_chain (void)
3983 {
3984   rtx_insn *x, *prevx, *nextx;
3985   int insn_cnt1, insn_cnt2;
3986 
3987   for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3988        x != 0;
3989        prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3990     gcc_assert (PREV_INSN (x) == prevx);
3991 
3992   gcc_assert (prevx == get_last_insn ());
3993 
3994   for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3995        x != 0;
3996        nextx = x, insn_cnt2++, x = PREV_INSN (x))
3997     gcc_assert (NEXT_INSN (x) == nextx);
3998 
3999   gcc_assert (insn_cnt1 == insn_cnt2);
4000 }
4001 
4002 /* If we have assembler epilogues, the block falling through to exit must
4003    be the last one in the reordered chain when we reach final.  Ensure
4004    that this condition is met.  */
4005 static void
4006 fixup_fallthru_exit_predecessor (void)
4007 {
4008   edge e;
4009   basic_block bb = NULL;
4010 
4011   /* This transformation is not valid before reload, because we might
4012      separate a call from the instruction that copies the return
4013      value.  */
4014   gcc_assert (reload_completed);
4015 
4016   e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4017   if (e)
4018     bb = e->src;
4019 
4020   if (bb && bb->aux)
4021     {
4022       basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4023 
4024       /* If the very first block is the one with the fall-through exit
4025 	 edge, we have to split that block.  */
4026       if (c == bb)
4027 	{
4028 	  bb = split_block_after_labels (bb)->dest;
4029 	  bb->aux = c->aux;
4030 	  c->aux = bb;
4031 	  BB_FOOTER (bb) = BB_FOOTER (c);
4032 	  BB_FOOTER (c) = NULL;
4033 	}
4034 
4035       while (c->aux != bb)
4036 	c = (basic_block) c->aux;
4037 
4038       c->aux = bb->aux;
4039       while (c->aux)
4040 	c = (basic_block) c->aux;
4041 
4042       c->aux = bb;
4043       bb->aux = NULL;
4044     }
4045 }
4046 
4047 /* In case there are more than one fallthru predecessors of exit, force that
4048    there is only one.  */
4049 
4050 static void
4051 force_one_exit_fallthru (void)
4052 {
4053   edge e, predecessor = NULL;
4054   bool more = false;
4055   edge_iterator ei;
4056   basic_block forwarder, bb;
4057 
4058   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4059     if (e->flags & EDGE_FALLTHRU)
4060       {
4061 	if (predecessor == NULL)
4062 	  predecessor = e;
4063 	else
4064 	  {
4065 	    more = true;
4066 	    break;
4067 	  }
4068       }
4069 
4070   if (!more)
4071     return;
4072 
4073   /* Exit has several fallthru predecessors.  Create a forwarder block for
4074      them.  */
4075   forwarder = split_edge (predecessor);
4076   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4077        (e = ei_safe_edge (ei)); )
4078     {
4079       if (e->src == forwarder
4080 	  || !(e->flags & EDGE_FALLTHRU))
4081 	ei_next (&ei);
4082       else
4083 	redirect_edge_and_branch_force (e, forwarder);
4084     }
4085 
4086   /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4087      exit block.  */
4088   FOR_EACH_BB_FN (bb, cfun)
4089     {
4090       if (bb->aux == NULL && bb != forwarder)
4091 	{
4092 	  bb->aux = forwarder;
4093 	  break;
4094 	}
4095     }
4096 }
4097 
4098 /* Return true in case it is possible to duplicate the basic block BB.  */
4099 
4100 static bool
4101 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4102 {
4103   /* Do not attempt to duplicate tablejumps, as we need to unshare
4104      the dispatch table.  This is difficult to do, as the instructions
4105      computing jump destination may be hoisted outside the basic block.  */
4106   if (tablejump_p (BB_END (bb), NULL, NULL))
4107     return false;
4108 
4109   /* Do not duplicate blocks containing insns that can't be copied.  */
4110   if (targetm.cannot_copy_insn_p)
4111     {
4112       rtx_insn *insn = BB_HEAD (bb);
4113       while (1)
4114 	{
4115 	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4116 	    return false;
4117 	  if (insn == BB_END (bb))
4118 	    break;
4119 	  insn = NEXT_INSN (insn);
4120 	}
4121     }
4122 
4123   return true;
4124 }
4125 
4126 rtx_insn *
4127 duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
4128 {
4129   rtx_insn *insn, *next, *copy;
4130   rtx_note *last;
4131 
4132   /* Avoid updating of boundaries of previous basic block.  The
4133      note will get removed from insn stream in fixup.  */
4134   last = emit_note (NOTE_INSN_DELETED);
4135 
4136   /* Create copy at the end of INSN chain.  The chain will
4137      be reordered later.  */
4138   for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4139     {
4140       switch (GET_CODE (insn))
4141 	{
4142 	case DEBUG_INSN:
4143 	  /* Don't duplicate label debug insns.  */
4144 	  if (DEBUG_BIND_INSN_P (insn)
4145 	      && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4146 	    break;
4147 	  /* FALLTHRU */
4148 	case INSN:
4149 	case CALL_INSN:
4150 	case JUMP_INSN:
4151 	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
4152 	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4153 	      && ANY_RETURN_P (JUMP_LABEL (insn)))
4154 	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
4155           maybe_copy_prologue_epilogue_insn (insn, copy);
4156 	  break;
4157 
4158 	case JUMP_TABLE_DATA:
4159 	  /* Avoid copying of dispatch tables.  We never duplicate
4160 	     tablejumps, so this can hit only in case the table got
4161 	     moved far from original jump.
4162 	     Avoid copying following barrier as well if any
4163 	     (and debug insns in between).  */
4164 	  for (next = NEXT_INSN (insn);
4165 	       next != NEXT_INSN (to);
4166 	       next = NEXT_INSN (next))
4167 	    if (!DEBUG_INSN_P (next))
4168 	      break;
4169 	  if (next != NEXT_INSN (to) && BARRIER_P (next))
4170 	    insn = next;
4171 	  break;
4172 
4173 	case CODE_LABEL:
4174 	  break;
4175 
4176 	case BARRIER:
4177 	  emit_barrier ();
4178 	  break;
4179 
4180 	case NOTE:
4181 	  switch (NOTE_KIND (insn))
4182 	    {
4183 	      /* In case prologue is empty and function contain label
4184 		 in first BB, we may want to copy the block.  */
4185 	    case NOTE_INSN_PROLOGUE_END:
4186 
4187 	    case NOTE_INSN_DELETED:
4188 	    case NOTE_INSN_DELETED_LABEL:
4189 	    case NOTE_INSN_DELETED_DEBUG_LABEL:
4190 	      /* No problem to strip these.  */
4191 	    case NOTE_INSN_FUNCTION_BEG:
4192 	      /* There is always just single entry to function.  */
4193 	    case NOTE_INSN_BASIC_BLOCK:
4194               /* We should only switch text sections once.  */
4195 	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4196 	      break;
4197 
4198 	    case NOTE_INSN_EPILOGUE_BEG:
4199 	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
4200 	      emit_note_copy (as_a <rtx_note *> (insn));
4201 	      break;
4202 
4203 	    default:
4204 	      /* All other notes should have already been eliminated.  */
4205 	      gcc_unreachable ();
4206 	    }
4207 	  break;
4208 	default:
4209 	  gcc_unreachable ();
4210 	}
4211     }
4212   insn = NEXT_INSN (last);
4213   delete_insn (last);
4214   return insn;
4215 }
4216 
4217 /* Create a duplicate of the basic block BB.  */
4218 
4219 static basic_block
4220 cfg_layout_duplicate_bb (basic_block bb)
4221 {
4222   rtx_insn *insn;
4223   basic_block new_bb;
4224 
4225   insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
4226   new_bb = create_basic_block (insn,
4227 			       insn ? get_last_insn () : NULL,
4228 			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4229 
4230   BB_COPY_PARTITION (new_bb, bb);
4231   if (BB_HEADER (bb))
4232     {
4233       insn = BB_HEADER (bb);
4234       while (NEXT_INSN (insn))
4235 	insn = NEXT_INSN (insn);
4236       insn = duplicate_insn_chain (BB_HEADER (bb), insn);
4237       if (insn)
4238 	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4239     }
4240 
4241   if (BB_FOOTER (bb))
4242     {
4243       insn = BB_FOOTER (bb);
4244       while (NEXT_INSN (insn))
4245 	insn = NEXT_INSN (insn);
4246       insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
4247       if (insn)
4248 	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4249     }
4250 
4251   return new_bb;
4252 }
4253 
4254 
4255 /* Main entry point to this module - initialize the datastructures for
4256    CFG layout changes.  It keeps LOOPS up-to-date if not null.
4257 
4258    FLAGS is a set of additional flags to pass to cleanup_cfg().  */
4259 
4260 void
4261 cfg_layout_initialize (int flags)
4262 {
4263   rtx_insn_list *x;
4264   basic_block bb;
4265 
4266   /* Once bb partitioning is complete, cfg layout mode should not be
4267      re-entered.  Entering cfg layout mode may require fixups.  As an
4268      example, if edge forwarding performed when optimizing the cfg
4269      layout required moving a block from the hot to the cold
4270      section. This would create an illegal partitioning unless some
4271      manual fixup was performed.  */
4272   gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
4273 
4274   initialize_original_copy_tables ();
4275 
4276   cfg_layout_rtl_register_cfg_hooks ();
4277 
4278   record_effective_endpoints ();
4279 
4280   /* Make sure that the targets of non local gotos are marked.  */
4281   for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4282     {
4283       bb = BLOCK_FOR_INSN (x->insn ());
4284       bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4285     }
4286 
4287   cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4288 }
4289 
4290 /* Splits superblocks.  */
4291 void
4292 break_superblocks (void)
4293 {
4294   bool need = false;
4295   basic_block bb;
4296 
4297   auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
4298   bitmap_clear (superblocks);
4299 
4300   FOR_EACH_BB_FN (bb, cfun)
4301     if (bb->flags & BB_SUPERBLOCK)
4302       {
4303 	bb->flags &= ~BB_SUPERBLOCK;
4304 	bitmap_set_bit (superblocks, bb->index);
4305 	need = true;
4306       }
4307 
4308   if (need)
4309     {
4310       rebuild_jump_labels (get_insns ());
4311       find_many_sub_basic_blocks (superblocks);
4312     }
4313 }
4314 
4315 /* Finalize the changes: reorder insn list according to the sequence specified
4316    by aux pointers, enter compensation code, rebuild scope forest.  */
4317 
4318 void
4319 cfg_layout_finalize (void)
4320 {
4321   free_dominance_info (CDI_DOMINATORS);
4322   force_one_exit_fallthru ();
4323   rtl_register_cfg_hooks ();
4324   if (reload_completed && !targetm.have_epilogue ())
4325     fixup_fallthru_exit_predecessor ();
4326   fixup_reorder_chain ();
4327 
4328   rebuild_jump_labels (get_insns ());
4329   delete_dead_jumptables ();
4330 
4331   if (flag_checking)
4332     verify_insn_chain ();
4333   checking_verify_flow_info ();
4334 }
4335 
4336 
4337 /* Same as split_block but update cfg_layout structures.  */
4338 
4339 static basic_block
4340 cfg_layout_split_block (basic_block bb, void *insnp)
4341 {
4342   rtx insn = (rtx) insnp;
4343   basic_block new_bb = rtl_split_block (bb, insn);
4344 
4345   BB_FOOTER (new_bb) = BB_FOOTER (bb);
4346   BB_FOOTER (bb) = NULL;
4347 
4348   return new_bb;
4349 }
4350 
4351 /* Redirect Edge to DEST.  */
4352 static edge
4353 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4354 {
4355   basic_block src = e->src;
4356   edge ret;
4357 
4358   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4359     return NULL;
4360 
4361   if (e->dest == dest)
4362     return e;
4363 
4364   if (e->flags & EDGE_CROSSING
4365       && BB_PARTITION (e->src) == BB_PARTITION (dest)
4366       && simplejump_p (BB_END (src)))
4367     {
4368       if (dump_file)
4369 	fprintf (dump_file,
4370 	  	 "Removing crossing jump while redirecting edge form %i to %i\n",
4371 		 e->src->index, dest->index);
4372       delete_insn (BB_END (src));
4373       e->flags |= EDGE_FALLTHRU;
4374     }
4375 
4376   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4377       && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4378     {
4379       df_set_bb_dirty (src);
4380       return ret;
4381     }
4382 
4383   if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4384       && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4385     {
4386       if (dump_file)
4387 	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4388 		 e->src->index, dest->index);
4389 
4390       df_set_bb_dirty (e->src);
4391       redirect_edge_succ (e, dest);
4392       return e;
4393     }
4394 
4395   /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
4396      in the case the basic block appears to be in sequence.  Avoid this
4397      transformation.  */
4398 
4399   if (e->flags & EDGE_FALLTHRU)
4400     {
4401       /* Redirect any branch edges unified with the fallthru one.  */
4402       if (JUMP_P (BB_END (src))
4403 	  && label_is_jump_target_p (BB_HEAD (e->dest),
4404 				     BB_END (src)))
4405 	{
4406 	  edge redirected;
4407 
4408 	  if (dump_file)
4409 	    fprintf (dump_file, "Fallthru edge unified with branch "
4410 		     "%i->%i redirected to %i\n",
4411 		     e->src->index, e->dest->index, dest->index);
4412 	  e->flags &= ~EDGE_FALLTHRU;
4413 	  redirected = redirect_branch_edge (e, dest);
4414 	  gcc_assert (redirected);
4415 	  redirected->flags |= EDGE_FALLTHRU;
4416 	  df_set_bb_dirty (redirected->src);
4417 	  return redirected;
4418 	}
4419       /* In case we are redirecting fallthru edge to the branch edge
4420 	 of conditional jump, remove it.  */
4421       if (EDGE_COUNT (src->succs) == 2)
4422 	{
4423 	  /* Find the edge that is different from E.  */
4424 	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4425 
4426 	  if (s->dest == dest
4427 	      && any_condjump_p (BB_END (src))
4428 	      && onlyjump_p (BB_END (src)))
4429 	    delete_insn (BB_END (src));
4430 	}
4431       if (dump_file)
4432 	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4433 		 e->src->index, e->dest->index, dest->index);
4434       ret = redirect_edge_succ_nodup (e, dest);
4435     }
4436   else
4437     ret = redirect_branch_edge (e, dest);
4438 
4439   fixup_partition_crossing (ret);
4440   /* We don't want simplejumps in the insn stream during cfglayout.  */
4441   gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
4442 
4443   df_set_bb_dirty (src);
4444   return ret;
4445 }
4446 
4447 /* Simple wrapper as we always can redirect fallthru edges.  */
4448 static basic_block
4449 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4450 {
4451   edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4452 
4453   gcc_assert (redirected);
4454   return NULL;
4455 }
4456 
4457 /* Same as delete_basic_block but update cfg_layout structures.  */
4458 
4459 static void
4460 cfg_layout_delete_block (basic_block bb)
4461 {
4462   rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
4463   rtx_insn **to;
4464 
4465   if (BB_HEADER (bb))
4466     {
4467       next = BB_HEAD (bb);
4468       if (prev)
4469 	SET_NEXT_INSN (prev) = BB_HEADER (bb);
4470       else
4471 	set_first_insn (BB_HEADER (bb));
4472       SET_PREV_INSN (BB_HEADER (bb)) = prev;
4473       insn = BB_HEADER (bb);
4474       while (NEXT_INSN (insn))
4475 	insn = NEXT_INSN (insn);
4476       SET_NEXT_INSN (insn) = next;
4477       SET_PREV_INSN (next) = insn;
4478     }
4479   next = NEXT_INSN (BB_END (bb));
4480   if (BB_FOOTER (bb))
4481     {
4482       insn = BB_FOOTER (bb);
4483       while (insn)
4484 	{
4485 	  if (BARRIER_P (insn))
4486 	    {
4487 	      if (PREV_INSN (insn))
4488 		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4489 	      else
4490 		BB_FOOTER (bb) = NEXT_INSN (insn);
4491 	      if (NEXT_INSN (insn))
4492 		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4493 	    }
4494 	  if (LABEL_P (insn))
4495 	    break;
4496 	  insn = NEXT_INSN (insn);
4497 	}
4498       if (BB_FOOTER (bb))
4499 	{
4500 	  insn = BB_END (bb);
4501 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4502 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4503 	  while (NEXT_INSN (insn))
4504 	    insn = NEXT_INSN (insn);
4505 	  SET_NEXT_INSN (insn) = next;
4506 	  if (next)
4507 	    SET_PREV_INSN (next) = insn;
4508 	  else
4509 	    set_last_insn (insn);
4510 	}
4511     }
4512   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4513     to = &BB_HEADER (bb->next_bb);
4514   else
4515     to = &cfg_layout_function_footer;
4516 
4517   rtl_delete_block (bb);
4518 
4519   if (prev)
4520     prev = NEXT_INSN (prev);
4521   else
4522     prev = get_insns ();
4523   if (next)
4524     next = PREV_INSN (next);
4525   else
4526     next = get_last_insn ();
4527 
4528   if (next && NEXT_INSN (next) != prev)
4529     {
4530       remaints = unlink_insn_chain (prev, next);
4531       insn = remaints;
4532       while (NEXT_INSN (insn))
4533 	insn = NEXT_INSN (insn);
4534       SET_NEXT_INSN (insn) = *to;
4535       if (*to)
4536 	SET_PREV_INSN (*to) = insn;
4537       *to = remaints;
4538     }
4539 }
4540 
4541 /* Return true when blocks A and B can be safely merged.  */
4542 
4543 static bool
4544 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4545 {
4546   /* If we are partitioning hot/cold basic blocks, we don't want to
4547      mess up unconditional or indirect jumps that cross between hot
4548      and cold sections.
4549 
4550      Basic block partitioning may result in some jumps that appear to
4551      be optimizable (or blocks that appear to be mergeable), but which really
4552      must be left untouched (they are required to make it safely across
4553      partition boundaries).  See  the comments at the top of
4554      bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
4555 
4556   if (BB_PARTITION (a) != BB_PARTITION (b))
4557     return false;
4558 
4559   /* Protect the loop latches.  */
4560   if (current_loops && b->loop_father->latch == b)
4561     return false;
4562 
4563   /* If we would end up moving B's instructions, make sure it doesn't fall
4564      through into the exit block, since we cannot recover from a fallthrough
4565      edge into the exit block occurring in the middle of a function.  */
4566   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4567     {
4568       edge e = find_fallthru_edge (b->succs);
4569       if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4570 	return false;
4571     }
4572 
4573   /* There must be exactly one edge in between the blocks.  */
4574   return (single_succ_p (a)
4575 	  && single_succ (a) == b
4576 	  && single_pred_p (b) == 1
4577 	  && a != b
4578 	  /* Must be simple edge.  */
4579 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4580 	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4581 	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4582 	  /* If the jump insn has side effects, we can't kill the edge.
4583 	     When not optimizing, try_redirect_by_replacing_jump will
4584 	     not allow us to redirect an edge by replacing a table jump.  */
4585 	  && (!JUMP_P (BB_END (a))
4586 	      || ((!optimize || reload_completed)
4587 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4588 }
4589 
4590 /* Merge block A and B.  The blocks must be mergeable.  */
4591 
4592 static void
4593 cfg_layout_merge_blocks (basic_block a, basic_block b)
4594 {
4595   bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
4596   rtx_insn *insn;
4597 
4598   gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4599 
4600   if (dump_file)
4601     fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4602 			 a->index);
4603 
4604   /* If there was a CODE_LABEL beginning B, delete it.  */
4605   if (LABEL_P (BB_HEAD (b)))
4606     {
4607       delete_insn (BB_HEAD (b));
4608     }
4609 
4610   /* We should have fallthru edge in a, or we can do dummy redirection to get
4611      it cleaned up.  */
4612   if (JUMP_P (BB_END (a)))
4613     try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4614   gcc_assert (!JUMP_P (BB_END (a)));
4615 
4616   /* When not optimizing and the edge is the only place in RTL which holds
4617      some unique locus, emit a nop with that locus in between.  */
4618   if (!optimize)
4619     emit_nop_for_unique_locus_between (a, b);
4620 
4621   /* Move things from b->footer after a->footer.  */
4622   if (BB_FOOTER (b))
4623     {
4624       if (!BB_FOOTER (a))
4625 	BB_FOOTER (a) = BB_FOOTER (b);
4626       else
4627 	{
4628 	  rtx_insn *last = BB_FOOTER (a);
4629 
4630 	  while (NEXT_INSN (last))
4631 	    last = NEXT_INSN (last);
4632 	  SET_NEXT_INSN (last) = BB_FOOTER (b);
4633 	  SET_PREV_INSN (BB_FOOTER (b)) = last;
4634 	}
4635       BB_FOOTER (b) = NULL;
4636     }
4637 
4638   /* Move things from b->header before a->footer.
4639      Note that this may include dead tablejump data, but we don't clean
4640      those up until we go out of cfglayout mode.  */
4641    if (BB_HEADER (b))
4642      {
4643       if (! BB_FOOTER (a))
4644 	BB_FOOTER (a) = BB_HEADER (b);
4645       else
4646 	{
4647 	  rtx_insn *last = BB_HEADER (b);
4648 
4649 	  while (NEXT_INSN (last))
4650 	    last = NEXT_INSN (last);
4651 	  SET_NEXT_INSN (last) = BB_FOOTER (a);
4652 	  SET_PREV_INSN (BB_FOOTER (a)) = last;
4653 	  BB_FOOTER (a) = BB_HEADER (b);
4654 	}
4655       BB_HEADER (b) = NULL;
4656     }
4657 
4658   /* In the case basic blocks are not adjacent, move them around.  */
4659   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4660     {
4661       insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4662 
4663       emit_insn_after_noloc (insn, BB_END (a), a);
4664     }
4665   /* Otherwise just re-associate the instructions.  */
4666   else
4667     {
4668       insn = BB_HEAD (b);
4669       BB_END (a) = BB_END (b);
4670     }
4671 
4672   /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4673      We need to explicitly call. */
4674   update_bb_for_insn_chain (insn, BB_END (b), a);
4675 
4676   /* Skip possible DELETED_LABEL insn.  */
4677   if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4678     insn = NEXT_INSN (insn);
4679   gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4680   BB_HEAD (b) = BB_END (b) = NULL;
4681   delete_insn (insn);
4682 
4683   df_bb_delete (b->index);
4684 
4685   /* If B was a forwarder block, propagate the locus on the edge.  */
4686   if (forwarder_p
4687       && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
4688     EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4689 
4690   if (dump_file)
4691     fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4692 }
4693 
4694 /* Split edge E.  */
4695 
4696 static basic_block
4697 cfg_layout_split_edge (edge e)
4698 {
4699   basic_block new_bb =
4700     create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4701 			? NEXT_INSN (BB_END (e->src)) : get_insns (),
4702 			NULL_RTX, e->src);
4703 
4704   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4705     BB_COPY_PARTITION (new_bb, e->src);
4706   else
4707     BB_COPY_PARTITION (new_bb, e->dest);
4708   make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4709   redirect_edge_and_branch_force (e, new_bb);
4710 
4711   return new_bb;
4712 }
4713 
4714 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */
4715 
4716 static void
4717 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4718 {
4719 }
4720 
4721 /* Return true if BB contains only labels or non-executable
4722    instructions.  */
4723 
4724 static bool
4725 rtl_block_empty_p (basic_block bb)
4726 {
4727   rtx_insn *insn;
4728 
4729   if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4730       || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4731     return true;
4732 
4733   FOR_BB_INSNS (bb, insn)
4734     if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4735       return false;
4736 
4737   return true;
4738 }
4739 
4740 /* Split a basic block if it ends with a conditional branch and if
4741    the other part of the block is not empty.  */
4742 
4743 static basic_block
4744 rtl_split_block_before_cond_jump (basic_block bb)
4745 {
4746   rtx_insn *insn;
4747   rtx_insn *split_point = NULL;
4748   rtx_insn *last = NULL;
4749   bool found_code = false;
4750 
4751   FOR_BB_INSNS (bb, insn)
4752     {
4753       if (any_condjump_p (insn))
4754 	split_point = last;
4755       else if (NONDEBUG_INSN_P (insn))
4756 	found_code = true;
4757       last = insn;
4758     }
4759 
4760   /* Did not find everything.  */
4761   if (found_code && split_point)
4762     return split_block (bb, split_point)->dest;
4763   else
4764     return NULL;
4765 }
4766 
4767 /* Return 1 if BB ends with a call, possibly followed by some
4768    instructions that must stay with the call, 0 otherwise.  */
4769 
4770 static bool
4771 rtl_block_ends_with_call_p (basic_block bb)
4772 {
4773   rtx_insn *insn = BB_END (bb);
4774 
4775   while (!CALL_P (insn)
4776 	 && insn != BB_HEAD (bb)
4777 	 && (keep_with_call_p (insn)
4778 	     || NOTE_P (insn)
4779 	     || DEBUG_INSN_P (insn)))
4780     insn = PREV_INSN (insn);
4781   return (CALL_P (insn));
4782 }
4783 
4784 /* Return 1 if BB ends with a conditional branch, 0 otherwise.  */
4785 
4786 static bool
4787 rtl_block_ends_with_condjump_p (const_basic_block bb)
4788 {
4789   return any_condjump_p (BB_END (bb));
4790 }
4791 
4792 /* Return true if we need to add fake edge to exit.
4793    Helper function for rtl_flow_call_edges_add.  */
4794 
4795 static bool
4796 need_fake_edge_p (const rtx_insn *insn)
4797 {
4798   if (!INSN_P (insn))
4799     return false;
4800 
4801   if ((CALL_P (insn)
4802        && !SIBLING_CALL_P (insn)
4803        && !find_reg_note (insn, REG_NORETURN, NULL)
4804        && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4805     return true;
4806 
4807   return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4808 	   && MEM_VOLATILE_P (PATTERN (insn)))
4809 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
4810 	      && asm_noperands (insn) != -1
4811 	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4812 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4813 }
4814 
4815 /* Add fake edges to the function exit for any non constant and non noreturn
4816    calls, volatile inline assembly in the bitmap of blocks specified by
4817    BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
4818    that were split.
4819 
4820    The goal is to expose cases in which entering a basic block does not imply
4821    that all subsequent instructions must be executed.  */
4822 
4823 static int
4824 rtl_flow_call_edges_add (sbitmap blocks)
4825 {
4826   int i;
4827   int blocks_split = 0;
4828   int last_bb = last_basic_block_for_fn (cfun);
4829   bool check_last_block = false;
4830 
4831   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
4832     return 0;
4833 
4834   if (! blocks)
4835     check_last_block = true;
4836   else
4837     check_last_block = bitmap_bit_p (blocks,
4838 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
4839 
4840   /* In the last basic block, before epilogue generation, there will be
4841      a fallthru edge to EXIT.  Special care is required if the last insn
4842      of the last basic block is a call because make_edge folds duplicate
4843      edges, which would result in the fallthru edge also being marked
4844      fake, which would result in the fallthru edge being removed by
4845      remove_fake_edges, which would result in an invalid CFG.
4846 
4847      Moreover, we can't elide the outgoing fake edge, since the block
4848      profiler needs to take this into account in order to solve the minimal
4849      spanning tree in the case that the call doesn't return.
4850 
4851      Handle this by adding a dummy instruction in a new last basic block.  */
4852   if (check_last_block)
4853     {
4854       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
4855       rtx_insn *insn = BB_END (bb);
4856 
4857       /* Back up past insns that must be kept in the same block as a call.  */
4858       while (insn != BB_HEAD (bb)
4859 	     && keep_with_call_p (insn))
4860 	insn = PREV_INSN (insn);
4861 
4862       if (need_fake_edge_p (insn))
4863 	{
4864 	  edge e;
4865 
4866 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
4867 	  if (e)
4868 	    {
4869 	      insert_insn_on_edge (gen_use (const0_rtx), e);
4870 	      commit_edge_insertions ();
4871 	    }
4872 	}
4873     }
4874 
4875   /* Now add fake edges to the function exit for any non constant
4876      calls since there is no way that we can determine if they will
4877      return or not...  */
4878 
4879   for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
4880     {
4881       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
4882       rtx_insn *insn;
4883       rtx_insn *prev_insn;
4884 
4885       if (!bb)
4886 	continue;
4887 
4888       if (blocks && !bitmap_bit_p (blocks, i))
4889 	continue;
4890 
4891       for (insn = BB_END (bb); ; insn = prev_insn)
4892 	{
4893 	  prev_insn = PREV_INSN (insn);
4894 	  if (need_fake_edge_p (insn))
4895 	    {
4896 	      edge e;
4897 	      rtx_insn *split_at_insn = insn;
4898 
4899 	      /* Don't split the block between a call and an insn that should
4900 		 remain in the same block as the call.  */
4901 	      if (CALL_P (insn))
4902 		while (split_at_insn != BB_END (bb)
4903 		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
4904 		  split_at_insn = NEXT_INSN (split_at_insn);
4905 
4906 	      /* The handling above of the final block before the epilogue
4907 		 should be enough to verify that there is no edge to the exit
4908 		 block in CFG already.  Calling make_edge in such case would
4909 		 cause us to mark that edge as fake and remove it later.  */
4910 
4911 	      if (flag_checking && split_at_insn == BB_END (bb))
4912 		{
4913 		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
4914 		  gcc_assert (e == NULL);
4915 		}
4916 
4917 	      /* Note that the following may create a new basic block
4918 		 and renumber the existing basic blocks.  */
4919 	      if (split_at_insn != BB_END (bb))
4920 		{
4921 		  e = split_block (bb, split_at_insn);
4922 		  if (e)
4923 		    blocks_split++;
4924 		}
4925 
4926 	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
4927 	      ne->probability = profile_probability::guessed_never ();
4928 	    }
4929 
4930 	  if (insn == BB_HEAD (bb))
4931 	    break;
4932 	}
4933     }
4934 
4935   if (blocks_split)
4936     verify_flow_info ();
4937 
4938   return blocks_split;
4939 }
4940 
4941 /* Add COMP_RTX as a condition at end of COND_BB.  FIRST_HEAD is
4942    the conditional branch target, SECOND_HEAD should be the fall-thru
4943    there is no need to handle this here the loop versioning code handles
4944    this.  the reason for SECON_HEAD is that it is needed for condition
4945    in trees, and this should be of the same type since it is a hook.  */
4946 static void
4947 rtl_lv_add_condition_to_bb (basic_block first_head ,
4948 			    basic_block second_head ATTRIBUTE_UNUSED,
4949 			    basic_block cond_bb, void *comp_rtx)
4950 {
4951   rtx_code_label *label;
4952   rtx_insn *seq, *jump;
4953   rtx op0 = XEXP ((rtx)comp_rtx, 0);
4954   rtx op1 = XEXP ((rtx)comp_rtx, 1);
4955   enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
4956   machine_mode mode;
4957 
4958 
4959   label = block_label (first_head);
4960   mode = GET_MODE (op0);
4961   if (mode == VOIDmode)
4962     mode = GET_MODE (op1);
4963 
4964   start_sequence ();
4965   op0 = force_operand (op0, NULL_RTX);
4966   op1 = force_operand (op1, NULL_RTX);
4967   do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
4968 			   profile_probability::uninitialized ());
4969   jump = get_last_insn ();
4970   JUMP_LABEL (jump) = label;
4971   LABEL_NUSES (label)++;
4972   seq = get_insns ();
4973   end_sequence ();
4974 
4975   /* Add the new cond, in the new head.  */
4976   emit_insn_after (seq, BB_END (cond_bb));
4977 }
4978 
4979 
4980 /* Given a block B with unconditional branch at its end, get the
4981    store the return the branch edge and the fall-thru edge in
4982    BRANCH_EDGE and FALLTHRU_EDGE respectively.  */
4983 static void
4984 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4985 			   edge *fallthru_edge)
4986 {
4987   edge e = EDGE_SUCC (b, 0);
4988 
4989   if (e->flags & EDGE_FALLTHRU)
4990     {
4991       *fallthru_edge = e;
4992       *branch_edge = EDGE_SUCC (b, 1);
4993     }
4994   else
4995     {
4996       *branch_edge = e;
4997       *fallthru_edge = EDGE_SUCC (b, 1);
4998     }
4999 }
5000 
5001 void
5002 init_rtl_bb_info (basic_block bb)
5003 {
5004   gcc_assert (!bb->il.x.rtl);
5005   bb->il.x.head_ = NULL;
5006   bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5007 }
5008 
5009 /* Returns true if it is possible to remove edge E by redirecting
5010    it to the destination of the other edge from E->src.  */
5011 
5012 static bool
5013 rtl_can_remove_branch_p (const_edge e)
5014 {
5015   const_basic_block src = e->src;
5016   const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
5017   const rtx_insn *insn = BB_END (src);
5018   rtx set;
5019 
5020   /* The conditions are taken from try_redirect_by_replacing_jump.  */
5021   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
5022     return false;
5023 
5024   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5025     return false;
5026 
5027   if (BB_PARTITION (src) != BB_PARTITION (target))
5028     return false;
5029 
5030   if (!onlyjump_p (insn)
5031       || tablejump_p (insn, NULL, NULL))
5032     return false;
5033 
5034   set = single_set (insn);
5035   if (!set || side_effects_p (set))
5036     return false;
5037 
5038   return true;
5039 }
5040 
5041 static basic_block
5042 rtl_duplicate_bb (basic_block bb)
5043 {
5044   bb = cfg_layout_duplicate_bb (bb);
5045   bb->aux = NULL;
5046   return bb;
5047 }
5048 
5049 /* Do book-keeping of basic block BB for the profile consistency checker.
5050    If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5051    then do post-pass accounting.  Store the counting in RECORD.  */
5052 static void
5053 rtl_account_profile_record (basic_block bb, int after_pass,
5054 			    struct profile_record *record)
5055 {
5056   rtx_insn *insn;
5057   FOR_BB_INSNS (bb, insn)
5058     if (INSN_P (insn))
5059       {
5060 	record->size[after_pass] += insn_cost (insn, false);
5061 	if (bb->count.initialized_p ())
5062 	  record->time[after_pass]
5063 	    += insn_cost (insn, true) * bb->count.to_gcov_type ();
5064 	else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
5065 	  record->time[after_pass]
5066 	    += insn_cost (insn, true) * bb->count.to_frequency (cfun);
5067       }
5068 }
5069 
5070 /* Implementation of CFG manipulation for linearized RTL.  */
5071 struct cfg_hooks rtl_cfg_hooks = {
5072   "rtl",
5073   rtl_verify_flow_info,
5074   rtl_dump_bb,
5075   rtl_dump_bb_for_graph,
5076   rtl_create_basic_block,
5077   rtl_redirect_edge_and_branch,
5078   rtl_redirect_edge_and_branch_force,
5079   rtl_can_remove_branch_p,
5080   rtl_delete_block,
5081   rtl_split_block,
5082   rtl_move_block_after,
5083   rtl_can_merge_blocks,  /* can_merge_blocks_p */
5084   rtl_merge_blocks,
5085   rtl_predict_edge,
5086   rtl_predicted_by_p,
5087   cfg_layout_can_duplicate_bb_p,
5088   rtl_duplicate_bb,
5089   rtl_split_edge,
5090   rtl_make_forwarder_block,
5091   rtl_tidy_fallthru_edge,
5092   rtl_force_nonfallthru,
5093   rtl_block_ends_with_call_p,
5094   rtl_block_ends_with_condjump_p,
5095   rtl_flow_call_edges_add,
5096   NULL, /* execute_on_growing_pred */
5097   NULL, /* execute_on_shrinking_pred */
5098   NULL, /* duplicate loop for trees */
5099   NULL, /* lv_add_condition_to_bb */
5100   NULL, /* lv_adjust_loop_header_phi*/
5101   NULL, /* extract_cond_bb_edges */
5102   NULL, /* flush_pending_stmts */
5103   rtl_block_empty_p, /* block_empty_p */
5104   rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5105   rtl_account_profile_record,
5106 };
5107 
5108 /* Implementation of CFG manipulation for cfg layout RTL, where
5109    basic block connected via fallthru edges does not have to be adjacent.
5110    This representation will hopefully become the default one in future
5111    version of the compiler.  */
5112 
5113 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
5114   "cfglayout mode",
5115   rtl_verify_flow_info_1,
5116   rtl_dump_bb,
5117   rtl_dump_bb_for_graph,
5118   cfg_layout_create_basic_block,
5119   cfg_layout_redirect_edge_and_branch,
5120   cfg_layout_redirect_edge_and_branch_force,
5121   rtl_can_remove_branch_p,
5122   cfg_layout_delete_block,
5123   cfg_layout_split_block,
5124   rtl_move_block_after,
5125   cfg_layout_can_merge_blocks_p,
5126   cfg_layout_merge_blocks,
5127   rtl_predict_edge,
5128   rtl_predicted_by_p,
5129   cfg_layout_can_duplicate_bb_p,
5130   cfg_layout_duplicate_bb,
5131   cfg_layout_split_edge,
5132   rtl_make_forwarder_block,
5133   NULL, /* tidy_fallthru_edge */
5134   rtl_force_nonfallthru,
5135   rtl_block_ends_with_call_p,
5136   rtl_block_ends_with_condjump_p,
5137   rtl_flow_call_edges_add,
5138   NULL, /* execute_on_growing_pred */
5139   NULL, /* execute_on_shrinking_pred */
5140   duplicate_loop_to_header_edge, /* duplicate loop for trees */
5141   rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5142   NULL, /* lv_adjust_loop_header_phi*/
5143   rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
5144   NULL, /* flush_pending_stmts */
5145   rtl_block_empty_p, /* block_empty_p */
5146   rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5147   rtl_account_profile_record,
5148 };
5149 
5150 #include "gt-cfgrtl.h"
5151