1 /* Control flow graph manipulation code for GNU compiler.
2    Copyright (C) 1987-2014 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains low level functions to manipulate the CFG and analyze it
21    that are aware of the RTL intermediate language.
22 
23    Available functionality:
24      - Basic CFG/RTL manipulation API documented in cfghooks.h
25      - CFG-aware instruction chain manipulation
26 	 delete_insn, delete_insn_chain
27      - Edge splitting and committing to edges
28 	 insert_insn_on_edge, commit_edge_insertions
29      - CFG updating after insn simplification
30 	 purge_dead_edges, purge_all_dead_edges
31      - CFG fixing after coarse manipulation
32 	fixup_abnormal_edges
33 
34    Functions not supposed for generic use:
35      - Infrastructure to determine quickly basic block for insn
36 	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37      - Edge redirection with updating and optimizing of insn chain
38 	 block_label, tidy_fallthru_edge, force_nonfallthru  */
39 
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "tm.h"
44 #include "tree.h"
45 #include "hard-reg-set.h"
46 #include "basic-block.h"
47 #include "bb-reorder.h"
48 #include "regs.h"
49 #include "flags.h"
50 #include "function.h"
51 #include "except.h"
52 #include "rtl-error.h"
53 #include "tm_p.h"
54 #include "obstack.h"
55 #include "insn-attr.h"
56 #include "insn-config.h"
57 #include "expr.h"
58 #include "target.h"
59 #include "common/common-target.h"
60 #include "cfgloop.h"
61 #include "ggc.h"
62 #include "tree-pass.h"
63 #include "df.h"
64 
65 /* Holds the interesting leading and trailing notes for the function.
66    Only applicable if the CFG is in cfglayout mode.  */
67 static GTY(()) rtx cfg_layout_function_footer;
68 static GTY(()) rtx cfg_layout_function_header;
69 
70 static rtx skip_insns_after_block (basic_block);
71 static void record_effective_endpoints (void);
72 static rtx label_for_bb (basic_block);
73 static void fixup_reorder_chain (void);
74 
75 void verify_insn_chain (void);
76 static void fixup_fallthru_exit_predecessor (void);
77 static int can_delete_note_p (const_rtx);
78 static int can_delete_label_p (const_rtx);
79 static basic_block rtl_split_edge (edge);
80 static bool rtl_move_block_after (basic_block, basic_block);
81 static int rtl_verify_flow_info (void);
82 static basic_block cfg_layout_split_block (basic_block, void *);
83 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
84 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
85 static void cfg_layout_delete_block (basic_block);
86 static void rtl_delete_block (basic_block);
87 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
88 static edge rtl_redirect_edge_and_branch (edge, basic_block);
89 static basic_block rtl_split_block (basic_block, void *);
90 static void rtl_dump_bb (FILE *, basic_block, int, int);
91 static int rtl_verify_flow_info_1 (void);
92 static void rtl_make_forwarder_block (edge);
93 
94 /* Return true if NOTE is not one of the ones that must be kept paired,
95    so that we may simply delete it.  */
96 
97 static int
can_delete_note_p(const_rtx note)98 can_delete_note_p (const_rtx note)
99 {
100   switch (NOTE_KIND (note))
101     {
102     case NOTE_INSN_DELETED:
103     case NOTE_INSN_BASIC_BLOCK:
104     case NOTE_INSN_EPILOGUE_BEG:
105       return true;
106 
107     default:
108       return false;
109     }
110 }
111 
112 /* True if a given label can be deleted.  */
113 
114 static int
can_delete_label_p(const_rtx label)115 can_delete_label_p (const_rtx label)
116 {
117   return (!LABEL_PRESERVE_P (label)
118 	  /* User declared labels must be preserved.  */
119 	  && LABEL_NAME (label) == 0
120 	  && !in_expr_list_p (forced_labels, label));
121 }
122 
/* Delete INSN by patching it out.  Undeletable labels are converted to
   NOTE_INSN_DELETED_LABEL notes instead of being removed, and label use
   counts (LABEL_NUSES) referenced by INSN are kept consistent.  */

void
delete_insn (rtx insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (JUMP_TABLE_DATA_P (insn))
    {
      /* Deleting jump table data: drop the use count of every label the
	 dispatch vector (ADDR_VEC or ADDR_DIFF_VEC) points at.  */
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
219 
220 /* Like delete_insn but also purge dead edges from BB.  */
221 
222 void
delete_insn_and_edges(rtx insn)223 delete_insn_and_edges (rtx insn)
224 {
225   bool purge = false;
226 
227   if (INSN_P (insn)
228       && BLOCK_FOR_INSN (insn)
229       && BB_END (BLOCK_FOR_INSN (insn)) == insn)
230     purge = true;
231   delete_insn (insn);
232   if (purge)
233     purge_dead_edges (BLOCK_FOR_INSN (insn));
234 }
235 
236 /* Unlink a chain of insns between START and FINISH, leaving notes
237    that must be paired.  If CLEAR_BB is true, we set bb field for
238    insns that cannot be removed to NULL.  */
239 
240 void
delete_insn_chain(rtx start,rtx finish,bool clear_bb)241 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
242 {
243   rtx prev, current;
244 
245   /* Unchain the insns one by one.  It would be quicker to delete all of these
246      with a single unchaining, rather than one at a time, but we need to keep
247      the NOTE's.  */
248   current = finish;
249   while (1)
250     {
251       prev = PREV_INSN (current);
252       if (NOTE_P (current) && !can_delete_note_p (current))
253 	;
254       else
255 	delete_insn (current);
256 
257       if (clear_bb && !INSN_DELETED_P (current))
258 	set_block_for_insn (current, NULL);
259 
260       if (current == start)
261 	break;
262       current = prev;
263     }
264 }
265 
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      /* The bb note must land right after the block's label, or at the
	 head position when there is no label.  */
      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* The note goes after the label, inside the block.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No label: the note becomes the new block head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
345 
/* Create new basic block consisting of instructions in between HEAD and END
   and place it to the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create basic block at the end of INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx head = (rtx) headp, end = (rtx) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    {
      /* Grow by roughly 25% to amortize repeated insertions.  */
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  /* Clear the "used" tag set by create_basic_block_structure.  */
  bb->aux = NULL;
  return bb;
}
373 
374 static basic_block
cfg_layout_create_basic_block(void * head,void * end,basic_block after)375 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
376 {
377   basic_block newbb = rtl_create_basic_block (head, end, after);
378 
379   return newbb;
380 }
381 
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx insn, end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  /* END includes any trailing barrier/table after the block.  */
  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);


  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}
411 
412 /* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */
413 
414 void
compute_bb_for_insn(void)415 compute_bb_for_insn (void)
416 {
417   basic_block bb;
418 
419   FOR_EACH_BB_FN (bb, cfun)
420     {
421       rtx end = BB_END (bb);
422       rtx insn;
423 
424       for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
425 	{
426 	  BLOCK_FOR_INSN (insn) = bb;
427 	  if (insn == end)
428 	    break;
429 	}
430     }
431 }
432 
433 /* Release the basic_block_for_insn array.  */
434 
435 unsigned int
free_bb_for_insn(void)436 free_bb_for_insn (void)
437 {
438   rtx insn;
439   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
440     if (!BARRIER_P (insn))
441       BLOCK_FOR_INSN (insn) = NULL;
442   return 0;
443 }
444 
/* Pass work function: final df bookkeeping for delay-slot targets,
   note the hot/cold section boundary if the function was partitioned,
   then release the per-insn basic block map.  */

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}
464 
namespace {

/* Pass descriptor for the *free_cfg pass; it destroys PROP_cfg.  */
const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_pass_free_cfg (); }

}; // class pass_free_cfg

} // anon namespace
495 
/* Factory for the *free_cfg pass; caller (the pass manager) owns the
   returned object.  */

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}
501 
502 /* Return RTX to emit after when we want to emit code on the entry of function.  */
503 rtx
entry_of_function(void)504 entry_of_function (void)
505 {
506   return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
507 	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
508 }
509 
/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  /* The entry block's single successor edge must be a fallthru edge;
     inserting on it guarantees exactly-once execution.  */
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
522 
523 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
524    (or BARRIER if found) and notify df of the bb change.
525    The insn chain range is inclusive
526    (i.e. both BEGIN and END will be updated. */
527 
528 static void
update_bb_for_insn_chain(rtx begin,rtx end,basic_block bb)529 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
530 {
531   rtx insn;
532 
533   end = NEXT_INSN (end);
534   for (insn = begin; insn != end; insn = NEXT_INSN (insn))
535     if (!BARRIER_P (insn))
536       df_insn_change_bb (insn, bb);
537 }
538 
/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  Covers the whole head..end range.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}
547 
548 
549 /* Like active_insn_p, except keep the return value clobber around
550    even after reload.  */
551 
552 static bool
flow_active_insn_p(const_rtx insn)553 flow_active_insn_p (const_rtx insn)
554 {
555   if (active_insn_p (insn))
556     return true;
557 
558   /* A clobber of the function return value exists for buggy
559      programs that fail to return a value.  Its effect is to
560      keep the return value from being live across the entire
561      function.  If we allow it to be skipped, we introduce the
562      possibility for register lifetime confusion.  */
563   if (GET_CODE (PATTERN (insn)) == CLOBBER
564       && REG_P (XEXP (PATTERN (insn), 0))
565       && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
566     return true;
567 
568   return false;
569 }
570 
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb))
    return false;

  /* Scan every insn except the last one; any active insn means the
     block has an effect.  */
  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  /* The final insn (INSN == BB_END here) is allowed to be a simple
     jump, since that only transfers control.  */
  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}
591 
592 /* Likewise, but protect loop latches, headers and preheaders.  */
593 /* FIXME: Make this a cfg hook.  */
594 
595 bool
forwarder_block_p(const_basic_block bb)596 forwarder_block_p (const_basic_block bb)
597 {
598   if (!contains_no_active_insn_p (bb))
599     return false;
600 
601   /* Protect loop latches, headers and preheaders.  */
602   if (current_loops)
603     {
604       basic_block dest;
605       if (bb->loop_father->header == bb)
606 	return false;
607       dest = EDGE_SUCC (bb, 0)->dest;
608       if (dest->loop_father->header == dest)
609 	return false;
610     }
611 
612   return true;
613 }
614 
/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx insn = BB_END (src);
  rtx insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  /* Fallthru requires the blocks to be physically adjacent.  */
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  /* An existing fallthru edge to the exit block would be clobbered.  */
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  /* Finally, the next active insn after SRC's end must be the first
     active insn of TARGET.  */
  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}
646 
647 /* Return nonzero if we could reach target from src by falling through,
648    if the target was made adjacent.  If we already have a fall-through
649    edge to the exit block, we can't do that.  */
650 static bool
could_fall_through(basic_block src,basic_block target)651 could_fall_through (basic_block src, basic_block target)
652 {
653   edge e;
654   edge_iterator ei;
655 
656   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
657     return true;
658   FOR_EACH_EDGE (e, ei, src->succs)
659     if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
660 	&& e->flags & EDGE_FALLTHRU)
661       return 0;
662   return true;
663 }
664 
665 /* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
666 rtx
bb_note(basic_block bb)667 bb_note (basic_block bb)
668 {
669   rtx note;
670 
671   note = BB_HEAD (bb);
672   if (LABEL_P (note))
673     note = NEXT_INSN (note);
674 
675   gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
676   return note;
677 }
678 
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx
first_insn_after_basic_block_note (basic_block block)
{
  rtx insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL_RTX;
  /* Skip the optional label; the bb note must follow.  */
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}
698 
/* Creates a new basic block just after basic block B by splitting
   everything after specified instruction I.  INSNP is the split point
   (an rtx); when NULL, split right after the bb note, i.e. move the
   whole body into the new block.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = (rtx) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  The new block inherits all of BB's
     successors; BB keeps only the (implicit) fallthru to NEW_BB.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
760 
/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx insn, end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  If the last insn of A carrying a
     location already has this locus, the edge's copy is redundant.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward, looking for the same locus on its first
     located insn.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}
797 
798 /* If the single edge between blocks A and B is the only place in RTL which
799    holds some unique locus, emit a nop with that locus between the blocks.  */
800 
801 static void
emit_nop_for_unique_locus_between(basic_block a,basic_block b)802 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
803 {
804   if (!unique_locus_on_edge_between_p (a, b))
805     return;
806 
807   BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
808   INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
809 }
810 
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  B's label, bb note and any jump out of A
   are deleted; B's remaining insns are reassociated with A.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  rtx b_debug_start = b_end, b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Peel trailing debug insns off B; they are handled separately so
     they end up after the merged block's real end.  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL_RTX;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
932 
/* Return true when block A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  Note the
     short-circuit order: single_succ_p must hold before single_succ
     may be used.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
972 
973 /* Return the label in the head of basic block BLOCK.  Create one if it doesn't
974    exist.  */
975 
976 rtx
block_label(basic_block block)977 block_label (basic_block block)
978 {
979   if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
980     return NULL_RTX;
981 
982   if (!LABEL_P (BB_HEAD (block)))
983     {
984       BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
985     }
986 
987   return BB_HEAD (block);
988 }
989 
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  (The boolean result
	 of the inner comparison is used as the index of the other edge.)  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  /* The jump must have no effects beyond the jump itself.  */
  if (!onlyjump_p (insn))
    return NULL;
  /* Punt on tablejumps when not optimizing or after reload.  */
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  rtx insn = BB_FOOTER (src);	/* NB: shadows the outer INSN.  */

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  /* Unlink the barrier from the footer chain.  */
		  if (PREV_INSN (insn))
		    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    BB_FOOTER (src) = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      /* A label marks jumptable data; stop scanning there.  */
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  /* redirect_jump may only fail for the exit block, which has
	     no label to redirect to.  */
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.
	 NOTE(review): INSN was passed to delete_insn_chain just above;
	 this relies on the deleted insn's pattern remaining readable —
	 confirm against delete_insn's behavior.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
				        PREV_INSN (barrier), src);

	      /* Unlink NEW_INSN from its current position...  */
	      NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      /* ...and relink it immediately before BARRIER.  */
	      NEXT_INSN (new_insn) = barrier;
	      NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      PREV_INSN (new_insn) = PREV_INSN (barrier);
	      PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  /* The single remaining edge is unconditional.  */
  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
1167 
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.  */

static bool
patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
{
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (new_bb);

      /* The exit block has no label to refer to.  */
      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      /* ADDR_VECs keep the label vector in operand 0,
	 ADDR_DIFF_VECs in operand 1.  */
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      /* Rewrite every table entry that points at OLD_LABEL, keeping
	 label use counts up to date.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      /* An asm goto: retarget every label operand that refers to
	 OLD_LABEL.  */
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  /* NEW_LABEL is now the primary target; any REG_LABEL_TARGET
	     note for it would be redundant.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  /* OLD_LABEL was a secondary target; move its note over to
	     NEW_LABEL unless NEW_LABEL is already covered.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      /* Retarget any REG_LABEL_OPERAND notes as well.  */
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (insn, block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
	      return false;
	    }
	}
    }
  return true;
}
1282 
1283 
/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      /* The block ends in a single jump; patch just that insn.  */
      if (!patch_jump_insn (insn, old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  /* Bring the CFG edge in line with the patched jump(s).  */
  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
1321 
1322 /* Called when edge E has been redirected to a new destination,
1323    in order to update the region crossing flag on the edge and
1324    jump.  */
1325 
1326 static void
fixup_partition_crossing(edge e)1327 fixup_partition_crossing (edge e)
1328 {
1329   rtx note;
1330 
1331   if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
1332       == EXIT_BLOCK_PTR_FOR_FN (cfun))
1333     return;
1334   /* If we redirected an existing edge, it may already be marked
1335      crossing, even though the new src is missing a reg crossing note.
1336      But make sure reg crossing note doesn't already exist before
1337      inserting.  */
1338   if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1339     {
1340       e->flags |= EDGE_CROSSING;
1341       note = find_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
1342       if (JUMP_P (BB_END (e->src))
1343           && !note)
1344         add_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
1345     }
1346   else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1347     {
1348       e->flags &= ~EDGE_CROSSING;
1349       /* Remove the section crossing note from jump at end of
1350          src if it exists, and if no other successors are
1351          still crossing.  */
1352       note = find_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
1353       if (note)
1354         {
1355           bool has_crossing_succ = false;
1356           edge e2;
1357           edge_iterator ei;
1358           FOR_EACH_EDGE (e2, ei, e->src->succs)
1359             {
1360               has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1361               if (has_crossing_succ)
1362                 break;
1363             }
1364           if (!has_crossing_succ)
1365             remove_note (BB_END (e->src), note);
1366         }
1367     }
1368 }
1369 
1370 /* Called when block BB has been reassigned to the cold partition,
1371    because it is now dominated by another cold block,
1372    to ensure that the region crossing attributes are updated.  */
1373 
1374 static void
fixup_new_cold_bb(basic_block bb)1375 fixup_new_cold_bb (basic_block bb)
1376 {
1377   edge e;
1378   edge_iterator ei;
1379 
1380   /* This is called when a hot bb is found to now be dominated
1381      by a cold bb and therefore needs to become cold. Therefore,
1382      its preds will no longer be region crossing. Any non-dominating
1383      preds that were previously hot would also have become cold
1384      in the caller for the same region. Any preds that were previously
1385      region-crossing will be adjusted in fixup_partition_crossing.  */
1386   FOR_EACH_EDGE (e, ei, bb->preds)
1387     {
1388       fixup_partition_crossing (e);
1389     }
1390 
1391   /* Possibly need to make bb's successor edges region crossing,
1392      or remove stale region crossing.  */
1393   FOR_EACH_EDGE (e, ei, bb->succs)
1394     {
1395       /* We can't have fall-through edges across partition boundaries.
1396          Note that force_nonfallthru will do any necessary partition
1397          boundary fixup by calling fixup_partition_crossing itself.  */
1398       if ((e->flags & EDGE_FALLTHRU)
1399           && BB_PARTITION (bb) != BB_PARTITION (e->dest)
1400 	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1401         force_nonfallthru (e);
1402       else
1403         fixup_partition_crossing (e);
1404     }
1405 }
1406 
1407 /* Attempt to change code to redirect edge E to TARGET.  Don't do that on
1408    expense of adding new instructions or reordering basic blocks.
1409 
1410    Function can be also called with edge destination equivalent to the TARGET.
1411    Then it should try the simplifications and do nothing if none is possible.
1412 
1413    Return edge representing the branch if transformation succeeded.  Return NULL
1414    on failure.
1415    We still return NULL in case E already destinated TARGET and we didn't
1416    managed to simplify instruction stream.  */
1417 
1418 static edge
rtl_redirect_edge_and_branch(edge e,basic_block target)1419 rtl_redirect_edge_and_branch (edge e, basic_block target)
1420 {
1421   edge ret;
1422   basic_block src = e->src;
1423   basic_block dest = e->dest;
1424 
1425   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1426     return NULL;
1427 
1428   if (dest == target)
1429     return e;
1430 
1431   if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1432     {
1433       df_set_bb_dirty (src);
1434       fixup_partition_crossing (ret);
1435       return ret;
1436     }
1437 
1438   ret = redirect_branch_edge (e, target);
1439   if (!ret)
1440     return NULL;
1441 
1442   df_set_bb_dirty (src);
1443   fixup_partition_crossing (ret);
1444   return ret;
1445 }
1446 
1447 /* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */
1448 
1449 void
emit_barrier_after_bb(basic_block bb)1450 emit_barrier_after_bb (basic_block bb)
1451 {
1452   rtx barrier = emit_barrier_after (BB_END (bb));
1453   gcc_assert (current_ir_type () == IR_RTL_CFGRTL
1454               || current_ir_type () == IR_RTL_CFGLAYOUT);
1455   if (current_ir_type () == IR_RTL_CFGLAYOUT)
1456     BB_FOOTER (bb) = unlink_insn_chain (barrier, barrier);
1457 }
1458 
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;	/* NB: shadows the outer NOTE.  */
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      /* Split probability/count between the new branch edge B and the
	 remaining fallthru edge E, per the REG_BR_PROB note if any.  */
      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = prob;
          /* Update this to use GCOV_COMPUTE_SCALE.  */
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  /* NOTE(review): this zeroes e->probability (subtracts it from
	     itself) rather than subtracting b->probability, which makes
	     the clamp below dead — confirm intent.  */
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  /* Retarget asm labels aimed at the old fallthru block.  */
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  rtx insn = BB_END (e->src), note;
	  rtx old_label = BB_HEAD (e->dest);
	  rtx new_label = BB_HEAD (target);

	  /* Keep JUMP_LABEL and REG_LABEL_TARGET/REG_LABEL_OPERAND notes
	     consistent with the retargeted asm labels (mirrors the logic
	     in patch_jump_insn).  */
	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      if (!tablejump_p (BB_END (e->src), NULL, &note))
	note = BB_END (e->src);
      note = NEXT_INSN (note);

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If e->src was previously region crossing, it no longer is
         and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  /* Split the counts evenly between the fallthru edge and the
	     new direct edge to TARGET.  */
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* Jumping to the exit block means emitting a return of the kind
	 requested by JUMP_LABEL (ret_rtx or simple_return_rtx).  */
      if (jump_label == ret_rtx)
	{
#ifdef HAVE_return
	  emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
#ifdef HAVE_simple_return
	  emit_jump_insn_after_setloc (gen_simple_return (),
				       BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
1704 
1705 /* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
1706    (and possibly create new basic block) to make edge non-fallthru.
1707    Return newly created BB or NULL if none.  */
1708 
1709 static basic_block
rtl_force_nonfallthru(edge e)1710 rtl_force_nonfallthru (edge e)
1711 {
1712   return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1713 }
1714 
1715 /* Redirect edge even at the expense of creating new jump insn or
1716    basic block.  Return new basic block if created, NULL otherwise.
1717    Conversion must be possible.  */
1718 
1719 static basic_block
rtl_redirect_edge_and_branch_force(edge e,basic_block target)1720 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1721 {
1722   if (redirect_edge_and_branch (e, target)
1723       || e->dest == target)
1724     return NULL;
1725 
1726   /* In case the edge redirection failed, try to force it to be non-fallthru
1727      and redirect newly created simplejump.  */
1728   df_set_bb_dirty (e->src);
1729   return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1730 }
1731 
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Step back past the jump so the deletion below removes it too.  */
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  Q is the last insn to keep;
     everything between it and the head of C goes away.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
1780 
/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  /* Not implemented for plain RTL mode; returning false reports that
     the move did not happen.  */
  return false;
}
1789 
/* Locate the last bb in the same partition as START_BB.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      /* Stop at the block whose successor in the chain belongs to a
	 different partition.  */
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
        return bb;
    }
  /* Return bb before the exit block.  (When the loop runs off the end,
     BB has reached the exit block here, so its prev_bb is the last
     real block of the function.)  */
  return bb->prev_bb;
}
1804 
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  BEFORE is the insn in front of which
     the new block's insns will be placed (NULL for the exit block).  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
          BB_COPY_PARTITION (bb, edge_in->dest);
        }
      else
        {
          basic_block after = edge_in->dest->prev_bb;
          /* If this is post-bb reordering, and the edge crosses a partition
             boundary, the new block needs to be inserted in the bb chain
             at the end of the src partition (since we put the new bb into
             that partition, see below). Otherwise we may end up creating
             an extra partition crossing in the chain, which is illegal.
             It can't go after the src, because src may have a fall-through
             to a different block.  */
          if (crtl->bb_reorder_complete
              && (edge_in->flags & EDGE_CROSSING))
            {
              after = last_bb_in_partition (edge_in->src);
              before = NEXT_INSN (BB_END (after));
              /* The instruction following the last bb in partition should
                 be a barrier, since it cannot end in a fall-through.  */
              gcc_checking_assert (BARRIER_P (before));
              before = NEXT_INSN (before);
            }
          bb = create_basic_block (before, NULL, after);
          /* Put the split bb into the src partition, to avoid creating
             a situation where a cold bb dominates a hot bb, in the case
             where src is cold and dest is hot. The src will dominate
             the new bb (whereas it might not have dominated dest).  */
          BB_COPY_PARTITION (bb, edge_in->src);
        }
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* force_nonfallthru must not have had to create a new block
	 here, since BB ends in no jump.  */
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1919 
1920 /* Queue instructions for insertion on an edge between two basic blocks.
1921    The new instructions and basic blocks (if any) will not appear in the
1922    CFG until commit_edge_insertions is called.  */
1923 
1924 void
insert_insn_on_edge(rtx pattern,edge e)1925 insert_insn_on_edge (rtx pattern, edge e)
1926 {
1927   /* We cannot insert instructions on an abnormal critical edge.
1928      It will be easier to find the culprit if we die now.  */
1929   gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1930 
1931   if (e->insns.r == NULL_RTX)
1932     start_sequence ();
1933   else
1934     push_to_sequence (e->insns.r);
1935 
1936   emit_insn (pattern);
1937 
1938   e->insns.r = get_insns ();
1939   end_sequence ();
1940 }
1941 
1942 /* Update the CFG for the instructions queued on edge E.  */
1943 
void
commit_one_edge_insertion (edge e)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL_RTX;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	/* Neither a label nor a basic-block note was skipped: insert
	   before the block's first insn.  */
	before = tmp;
      else if (tmp)
	/* Skipped a label and/or basic-block note: insert right after
	   whatever was skipped.  */
	after = PREV_INSN (tmp);
      else
	/* Walked off the end of the insn chain: append at the end.  */
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
         a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
        after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      /* LAST is the last real insn of the emitted sequence.  */
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      /* The inserted sequence ends in a return, so control no longer
	 falls through to the exit block; terminate the block with a
	 barrier instead.  */
      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      /* NOTE(review): the insn we inserted before is now unreachable
	 after the return jump, so it is deleted.  */
      if (before)
	delete_insn (before);
    }
  else
    /* Queued sequences must not contain jumps other than a final
       return handled above.  */
    gcc_assert (!JUMP_P (last));
}
2050 
2051 /* Update the CFG for all queued instructions.  */
2052 
2053 void
commit_edge_insertions(void)2054 commit_edge_insertions (void)
2055 {
2056   basic_block bb;
2057 
2058   /* Optimization passes that invoke this routine can cause hot blocks
2059      previously reached by both hot and cold blocks to become dominated only
2060      by cold blocks. This will cause the verification below to fail,
2061      and lead to now cold code in the hot section. In some cases this
2062      may only be visible after newly unreachable blocks are deleted,
2063      which will be done by fixup_partitions.  */
2064   fixup_partitions ();
2065 
2066 #ifdef ENABLE_CHECKING
2067   verify_flow_info ();
2068 #endif
2069 
2070   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2071 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2072     {
2073       edge e;
2074       edge_iterator ei;
2075 
2076       FOR_EACH_EDGE (e, ei, bb->succs)
2077 	if (e->insns.r)
2078 	  commit_one_edge_insertion (e);
2079     }
2080 }
2081 
2082 
2083 /* Print out RTL-specific basic block information (live information
2084    at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
2085    documented in dumpfile.h.  */
2086 
2087 static void
rtl_dump_bb(FILE * outf,basic_block bb,int indent,int flags)2088 rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
2089 {
2090   rtx insn;
2091   rtx last;
2092   char *s_indent;
2093 
2094   s_indent = (char *) alloca ((size_t) indent + 1);
2095   memset (s_indent, ' ', (size_t) indent);
2096   s_indent[indent] = '\0';
2097 
2098   if (df && (flags & TDF_DETAILS))
2099     {
2100       df_dump_top (bb, outf);
2101       putc ('\n', outf);
2102     }
2103 
2104   if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2105     for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
2106 	 insn = NEXT_INSN (insn))
2107       {
2108 	if (flags & TDF_DETAILS)
2109 	  df_dump_insn_top (insn, outf);
2110 	if (! (flags & TDF_SLIM))
2111 	  print_rtl_single (outf, insn);
2112 	else
2113 	  dump_insn_slim (outf, insn);
2114 	if (flags & TDF_DETAILS)
2115 	  df_dump_insn_bottom (insn, outf);
2116       }
2117 
2118   if (df && (flags & TDF_DETAILS))
2119     {
2120       df_dump_bottom (bb, outf);
2121       putc ('\n', outf);
2122     }
2123 
2124 }
2125 
2126 /* Like dump_function_to_file, but for RTL.  Print out dataflow information
2127    for the start of each basic block.  FLAGS are the TDF_* masks documented
2128    in dumpfile.h.  */
2129 
void
print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
{
  const_rtx tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-UID maps: which block starts/ends at each insn, and
	 how many blocks claim each insn.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
        flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      if (flags & TDF_BLOCKS)
	{
	  /* First pass: record block membership and block head/end
	     positions for every insn, so the second pass can annotate
	     the linear dump.  */
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      /* Second pass: walk the whole insn chain in order, printing each
	 insn with block banners around block boundaries.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  /* NOTE(review): this passes the global dump_flags
		     rather than the FLAGS argument — presumably
		     intentional, but worth confirming.  */
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      /* Flag insns whose block membership looks wrong; notes and
		 barriers legitimately live between blocks.  */
	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
2223 
2224 /* Update the branch probability of BB if a REG_BR_PROB is present.  */
2225 
2226 void
update_br_prob_note(basic_block bb)2227 update_br_prob_note (basic_block bb)
2228 {
2229   rtx note;
2230   if (!JUMP_P (BB_END (bb)))
2231     return;
2232   note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2233   if (!note || XINT (note, 0) == BRANCH_EDGE (bb)->probability)
2234     return;
2235   XINT (note, 0) = BRANCH_EDGE (bb)->probability;
2236 }
2237 
2238 /* Get the last insn associated with block BB (that includes barriers and
2239    tablejumps after BB).  */
2240 rtx
get_last_bb_insn(basic_block bb)2241 get_last_bb_insn (basic_block bb)
2242 {
2243   rtx tmp;
2244   rtx end = BB_END (bb);
2245 
2246   /* Include any jump table following the basic block.  */
2247   if (tablejump_p (end, NULL, &tmp))
2248     end = tmp;
2249 
2250   /* Include any barriers that may follow the basic block.  */
2251   tmp = next_nonnote_insn_bb (end);
2252   while (tmp && BARRIER_P (tmp))
2253     {
2254       end = tmp;
2255       tmp = next_nonnote_insn_bb (end);
2256     }
2257 
2258   return end;
2259 }
2260 
2261 /* Sanity check partition hotness to ensure that basic blocks in
2262    the cold partition don't dominate basic blocks in the hot partition.
2263    If FLAG_ONLY is true, report violations as errors. Otherwise
2264    re-mark the dominated blocks as cold, since this is run after
2265    cfg optimizations that may make hot blocks previously reached
2266    by both hot and cold blocks now only reachable along cold paths.  */
2267 
2268 static vec<basic_block>
find_partition_fixes(bool flag_only)2269 find_partition_fixes (bool flag_only)
2270 {
2271   basic_block bb;
2272   vec<basic_block> bbs_in_cold_partition = vNULL;
2273   vec<basic_block> bbs_to_fix = vNULL;
2274 
2275   /* Callers check this.  */
2276   gcc_checking_assert (crtl->has_bb_partition);
2277 
2278   FOR_EACH_BB_FN (bb, cfun)
2279     if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
2280       bbs_in_cold_partition.safe_push (bb);
2281 
2282   if (bbs_in_cold_partition.is_empty ())
2283     return vNULL;
2284 
2285   bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);
2286 
2287   if (dom_calculated_here)
2288     calculate_dominance_info (CDI_DOMINATORS);
2289 
2290   while (! bbs_in_cold_partition.is_empty  ())
2291     {
2292       bb = bbs_in_cold_partition.pop ();
2293       /* Any blocks dominated by a block in the cold section
2294          must also be cold.  */
2295       basic_block son;
2296       for (son = first_dom_son (CDI_DOMINATORS, bb);
2297            son;
2298            son = next_dom_son (CDI_DOMINATORS, son))
2299         {
2300           /* If son is not yet cold, then mark it cold here and
2301              enqueue it for further processing.  */
2302           if ((BB_PARTITION (son) != BB_COLD_PARTITION))
2303             {
2304               if (flag_only)
2305                 error ("non-cold basic block %d dominated "
2306                        "by a block in the cold partition (%d)", son->index, bb->index);
2307               else
2308                 BB_SET_PARTITION (son, BB_COLD_PARTITION);
2309               bbs_to_fix.safe_push (son);
2310               bbs_in_cold_partition.safe_push (son);
2311             }
2312         }
2313     }
2314 
2315   if (dom_calculated_here)
2316     free_dominance_info (CDI_DOMINATORS);
2317 
2318   return bbs_to_fix;
2319 }
2320 
2321 /* Perform cleanup on the hot/cold bb partitioning after optimization
2322    passes that modify the cfg.  */
2323 
2324 void
fixup_partitions(void)2325 fixup_partitions (void)
2326 {
2327   basic_block bb;
2328 
2329   if (!crtl->has_bb_partition)
2330     return;
2331 
2332   /* Delete any blocks that became unreachable and weren't
2333      already cleaned up, for example during edge forwarding
2334      and convert_jumps_to_returns. This will expose more
2335      opportunities for fixing the partition boundaries here.
2336      Also, the calculation of the dominance graph during verification
2337      will assert if there are unreachable nodes.  */
2338   delete_unreachable_blocks ();
2339 
2340   /* If there are partitions, do a sanity check on them: A basic block in
2341      a cold partition cannot dominate a basic block in a hot partition.
2342      Fixup any that now violate this requirement, as a result of edge
2343      forwarding and unreachable block deletion.  */
2344   vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2345 
2346   /* Do the partition fixup after all necessary blocks have been converted to
2347      cold, so that we only update the region crossings the minimum number of
2348      places, which can require forcing edges to be non fallthru.  */
2349   while (! bbs_to_fix.is_empty ())
2350     {
2351       bb = bbs_to_fix.pop ();
2352       fixup_new_cold_bb (bb);
2353     }
2354 }
2355 
2356 /* Verify, in the basic block chain, that there is at most one switch
2357    between hot/cold partitions. This condition will not be true until
2358    after reorder_basic_blocks is called.  */
2359 
2360 static int
verify_hot_cold_block_grouping(void)2361 verify_hot_cold_block_grouping (void)
2362 {
2363   basic_block bb;
2364   int err = 0;
2365   bool switched_sections = false;
2366   int current_partition = BB_UNPARTITIONED;
2367 
2368   /* Even after bb reordering is complete, we go into cfglayout mode
2369      again (in compgoto). Ensure we don't call this before going back
2370      into linearized RTL when any layout fixes would have been committed.  */
2371   if (!crtl->bb_reorder_complete
2372       || current_ir_type () != IR_RTL_CFGRTL)
2373     return err;
2374 
2375   FOR_EACH_BB_FN (bb, cfun)
2376     {
2377       if (current_partition != BB_UNPARTITIONED
2378           && BB_PARTITION (bb) != current_partition)
2379 	{
2380 	  if (switched_sections)
2381 	    {
2382 	      error ("multiple hot/cold transitions found (bb %i)",
2383 		     bb->index);
2384 	      err = 1;
2385 	    }
2386 	  else
2387             switched_sections = true;
2388 
2389           if (!crtl->has_bb_partition)
2390             error ("partition found but function partition flag not set");
2391 	}
2392       current_partition = BB_PARTITION (bb);
2393     }
2394 
2395   return err;
2396 }
2397 
2398 
2399 /* Perform several checks on the edges out of each block, such as
2400    the consistency of the branch probabilities, the correctness
2401    of hot/cold partition crossing edges, and the number of expected
2402    successor edges.  Also verify that the dominance relationship
2403    between hot/cold blocks is sane.  */
2404 
static int
rtl_verify_edges (void)
{
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Counters for the different categories of outgoing edges of BB;
	 checked against the kind of insn ending the block below.  */
      int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
      int n_eh = 0, n_abnormal = 0;
      edge e, fallthru = NULL;
      edge_iterator ei;
      rtx note;
      bool has_crossing_edge = false;

      /* A REG_BR_PROB note on a conditional jump must agree with the
	 probability recorded on the branch edge.  */
      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
	      && profile_status_for_fn (cfun) != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
		     XINT (note, 0), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  bool is_crossing;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++, fallthru = e;

	  /* An edge really crosses a partition boundary when its
	     endpoints (excluding entry/exit) lie in different
	     partitions; EDGE_CROSSING must match that exactly.  */
	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
          has_crossing_edge |= is_crossing;
	  if (e->flags & EDGE_CROSSING)
	    {
	      if (!is_crossing)
		{
		  error ("EDGE_CROSSING incorrectly set across same section");
		  err = 1;
		}
	      /* Crossing edges may not fall through or be EH edges.  */
	      if (e->flags & EDGE_FALLTHRU)
		{
		  error ("fallthru edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (e->flags & EDGE_EH)
		{
		  error ("EH edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
              if (JUMP_P (BB_END (bb))
                  && !find_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX))
		{
		  error ("No region crossing jump at section boundary in bb %i",
			 bb->index);
		  err = 1;
		}
	    }
	  else if (is_crossing)
	    {
	      error ("EDGE_CROSSING missing across section boundary");
	      err = 1;
	    }

	  /* An edge with none of the "special" flags set counts as an
	     ordinary branch.  */
	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING
			    | EDGE_PRESERVE)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_abnormal_call++;

	  if (e->flags & EDGE_SIBCALL)
	    n_sibcall++;

	  if (e->flags & EDGE_EH)
	    n_eh++;

	  if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

        /* Conversely, a REG_CROSSING_JUMP note requires at least one
	   actual crossing edge.  */
        if (!has_crossing_edge
            && find_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX))
          {
            print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
            error ("Region crossing jump across same section in bb %i",
                   bb->index);
            err = 1;
          }

      /* Cross-check the edge counters against the block's last insn.  */
      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_eh > 1)
	{
	  error ("too many exception handling edges in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong number of branch edges after unconditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      /* NOTE(review): if BB ends in a conditional jump but has no
	 fallthru edge, FALLTHRU is NULL here and this dereference would
	 crash — presumably a condjump always has a fallthru edge at this
	 point; confirm.  */
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal_call && !CALL_P (BB_END (bb)))
	{
	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_sibcall && !CALL_P (BB_END (bb)))
	{
	  error ("sibcall edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal > n_eh
	  && !(CALL_P (BB_END (bb))
	       && n_abnormal == n_abnormal_call + n_sibcall)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}
    }

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.  */
  if (crtl->has_bb_partition && !err)
    {
      vec<basic_block> bbs_to_fix = find_partition_fixes (true);
      err = !bbs_to_fix.is_empty ();
    }

  /* Clean up.  */
  return err;
}
2577 
2578 /* Checks on the instructions within blocks. Currently checks that each
2579    block starts with a basic block note, and that basic block notes and
2580    control flow jumps are not found in the middle of the block.  */
2581 
2582 static int
rtl_verify_bb_insns(void)2583 rtl_verify_bb_insns (void)
2584 {
2585   rtx x;
2586   int err = 0;
2587   basic_block bb;
2588 
2589   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2590     {
2591       /* Now check the header of basic
2592 	 block.  It ought to contain optional CODE_LABEL followed
2593 	 by NOTE_BASIC_BLOCK.  */
2594       x = BB_HEAD (bb);
2595       if (LABEL_P (x))
2596 	{
2597 	  if (BB_END (bb) == x)
2598 	    {
2599 	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2600 		     bb->index);
2601 	      err = 1;
2602 	    }
2603 
2604 	  x = NEXT_INSN (x);
2605 	}
2606 
2607       if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2608 	{
2609 	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2610 		 bb->index);
2611 	  err = 1;
2612 	}
2613 
2614       if (BB_END (bb) == x)
2615 	/* Do checks for empty blocks here.  */
2616 	;
2617       else
2618 	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2619 	  {
2620 	    if (NOTE_INSN_BASIC_BLOCK_P (x))
2621 	      {
2622 		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2623 		       INSN_UID (x), bb->index);
2624 		err = 1;
2625 	      }
2626 
2627 	    if (x == BB_END (bb))
2628 	      break;
2629 
2630 	    if (control_flow_insn_p (x))
2631 	      {
2632 		error ("in basic block %d:", bb->index);
2633 		fatal_insn ("flow control insn inside a basic block", x);
2634 	      }
2635 	  }
2636     }
2637 
2638   /* Clean up.  */
2639   return err;
2640 }
2641 
2642 /* Verify that block pointers for instructions in basic blocks, headers and
2643    footers are set appropriately.  */
2644 
2645 static int
rtl_verify_bb_pointers(void)2646 rtl_verify_bb_pointers (void)
2647 {
2648   int err = 0;
2649   basic_block bb;
2650 
2651   /* Check the general integrity of the basic blocks.  */
2652   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2653     {
2654       rtx insn;
2655 
2656       if (!(bb->flags & BB_RTL))
2657 	{
2658 	  error ("BB_RTL flag not set for block %d", bb->index);
2659 	  err = 1;
2660 	}
2661 
2662       FOR_BB_INSNS (bb, insn)
2663 	if (BLOCK_FOR_INSN (insn) != bb)
2664 	  {
2665 	    error ("insn %d basic block pointer is %d, should be %d",
2666 		   INSN_UID (insn),
2667 		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2668 		   bb->index);
2669 	    err = 1;
2670 	  }
2671 
2672       for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2673 	if (!BARRIER_P (insn)
2674 	    && BLOCK_FOR_INSN (insn) != NULL)
2675 	  {
2676 	    error ("insn %d in header of bb %d has non-NULL basic block",
2677 		   INSN_UID (insn), bb->index);
2678 	    err = 1;
2679 	  }
2680       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2681 	if (!BARRIER_P (insn)
2682 	    && BLOCK_FOR_INSN (insn) != NULL)
2683 	  {
2684 	    error ("insn %d in footer of bb %d has non-NULL basic block",
2685 		   INSN_UID (insn), bb->index);
2686 	    err = 1;
2687 	  }
2688     }
2689 
2690   /* Clean up.  */
2691   return err;
2692 }
2693 
2694 /* Verify the CFG and RTL consistency common for both underlying RTL and
2695    cfglayout RTL.
2696 
2697    Currently it does following checks:
2698 
2699    - overlapping of basic blocks
2700    - insns with wrong BLOCK_FOR_INSN pointers
2701    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2702    - tails of basic blocks (ensure that boundary is necessary)
2703    - scans body of the basic block for JUMP_INSN, CODE_LABEL
2704      and NOTE_INSN_BASIC_BLOCK
2705    - verify that no fall_thru edge crosses hot/cold partition boundaries
2706    - verify that there are no pending RTL branch predictions
2707    - verify that hot blocks are not dominated by cold blocks
2708 
2709    In future it can be extended check a lot of other stuff as well
2710    (reachability of basic blocks, life information, etc. etc.).  */
2711 
2712 static int
rtl_verify_flow_info_1(void)2713 rtl_verify_flow_info_1 (void)
2714 {
2715   int err = 0;
2716 
2717   err |= rtl_verify_bb_pointers ();
2718 
2719   err |= rtl_verify_bb_insns ();
2720 
2721   err |= rtl_verify_edges ();
2722 
2723   return err;
2724 }
2725 
2726 /* Walk the instruction chain and verify that bb head/end pointers
2727   are correct, and that instructions are in exactly one bb and have
2728   correct block pointers.  */
2729 
static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx x;
  /* LAST_HEAD starts at the end of the insn chain; after each block is
     processed it is moved to just before that block's head, so blocks
     (visited in reverse layout order) are checked against disjoint
     chain segments.  */
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  /* Map from insn UID to the (single) block claiming that insn.  */
  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      /* Walk backwards from LAST_HEAD looking for this block's end;
	 everything passed over on the way lies between blocks.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

            /* And that the code outside of basic blocks has NULL bb field.  */
          if (!BARRIER_P (x)
              && BLOCK_FOR_INSN (x) != NULL)
            {
              error ("insn %d outside of basic blocks has non-NULL bb field",
                     INSN_UID (x));
              err = 1;
            }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* Resume the inter-block scan just before this block's head.  */
      last_head = PREV_INSN (x);
    }

  /* Whatever precedes the first block's head is also between blocks.  */
  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}
2814 
2815 /* Verify that fallthru edges point to adjacent blocks in layout order and
2816    that barriers exist after non-fallthru blocks.  */
2817 
static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      /* Reaching the end of the insn stream, or the next block's
		 basic-block note, means no barrier follows this block.  */
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx insn;

	  /* A fallthru edge between two real blocks requires the blocks
	     to be adjacent in the current layout.  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    /* The gap between the two adjacent blocks may hold only
	       inactive insns -- no barriers and no active insns.  */
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

   return err;
}
2873 
2874 /* Verify that blocks are laid out in consecutive order. While walking the
2875    instructions, verify that all expected instructions are inside the basic
2876    blocks, and that all returns are followed by barriers.  */
2877 
static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx x;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* Walk the entire insn stream, using the NOTE_INSN_BASIC_BLOCK markers
     to track which block (if any) the current insn belongs to.  */
  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      /* Insns between basic blocks may only be barriers, notes, and
	 labels (including the jump table data following a label).  */
      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      /* A non-conditional return jump must be directly followed by a
	 barrier (ignoring intervening notes).  */
      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);

      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks_for_fn (cfun));

  /* ERR is never set above; all failures here are hard errors reported
     via internal_error or fatal_insn, which do not return.  */
   return err;
}
2942 
2943 /* Verify the CFG and RTL consistency common for both underlying RTL and
2944    cfglayout RTL, plus consistency checks specific to linearized RTL mode.
2945 
2946    Currently it does following checks:
2947    - all checks of rtl_verify_flow_info_1
2948    - test head/end pointers
2949    - check that blocks are laid out in consecutive order
2950    - check that all insns are in the basic blocks
2951      (except the switch handling code, barriers and notes)
2952    - check that all returns are followed by barriers
2953    - check that all fallthru edge points to the adjacent blocks
2954    - verify that there is a single hot/cold partition boundary after bbro  */
2955 
2956 static int
rtl_verify_flow_info(void)2957 rtl_verify_flow_info (void)
2958 {
2959   int err = 0;
2960 
2961   err |= rtl_verify_flow_info_1 ();
2962 
2963   err |= rtl_verify_bb_insn_chain ();
2964 
2965   err |= rtl_verify_fallthru ();
2966 
2967   err |= rtl_verify_bb_layout ();
2968 
2969   err |= verify_hot_cold_block_grouping ();
2970 
2971   return err;
2972 }
2973 
2974 /* Assume that the preceding pass has possibly eliminated jump instructions
2975    or converted the unconditional jumps.  Eliminate the edges from CFG.
2976    Return true if any edges are eliminated.  */
2977 
bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* If the block ends with debug insns, walk back past debug insns and
     notes to the last real insn, which is what the edges describe.  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  /* The block ends in a jump: prune successor edges that no longer
     match the (possibly simplified) jump instruction.  */
  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      /* If no edge was removed (or nothing survived), there is nothing
	 left to redistribute.  */
      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = XINT (note, 0);
	  f->probability = REG_BR_PROB_BASE - b->probability;
          /* Update these to use GCOV_COMPUTE_SCALE.  */
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  /* The single remaining successor receives the whole probability
     and count of the block.  */
  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
3194 
3195 /* Search all basic blocks for potentially dead edges and purge them.  Return
3196    true if some edge has been eliminated.  */
3197 
3198 bool
purge_all_dead_edges(void)3199 purge_all_dead_edges (void)
3200 {
3201   int purged = false;
3202   basic_block bb;
3203 
3204   FOR_EACH_BB_FN (bb, cfun)
3205     {
3206       bool purged_here = purge_dead_edges (bb);
3207 
3208       purged |= purged_here;
3209     }
3210 
3211   return purged;
3212 }
3213 
3214 /* This is used by a few passes that emit some instructions after abnormal
3215    calls, moving the basic block's end, while they in fact do want to emit
3216    them on the fallthru edge.  Look for abnormal call edges, find backward
3217    the call in the block and insert the instructions on the edge instead.
3218 
3219    Similarly, handle instructions throwing exceptions internally.
3220 
3221    Return true when instructions have been found and inserted on edges.  */
3222 
bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
         exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* Only act when the block's last insn is no longer the call or
	 throwing insn the abnormal edge belongs to, i.e. something was
	 emitted after it.  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx stop, next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block right after the call/throwing insn,
		 then move the trailing insns onto the fallthru edge.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  INSN_DELETED_P (insn) = 0;
			  PREV_INSN (insn) = NULL_RTX;
			  NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
3302 
3303 /* Cut the insns from FIRST to LAST out of the insns stream.  */
3304 
3305 rtx
unlink_insn_chain(rtx first,rtx last)3306 unlink_insn_chain (rtx first, rtx last)
3307 {
3308   rtx prevfirst = PREV_INSN (first);
3309   rtx nextlast = NEXT_INSN (last);
3310 
3311   PREV_INSN (first) = NULL;
3312   NEXT_INSN (last) = NULL;
3313   if (prevfirst)
3314     NEXT_INSN (prevfirst) = nextlast;
3315   if (nextlast)
3316     PREV_INSN (nextlast) = prevfirst;
3317   else
3318     set_last_insn (prevfirst);
3319   if (!prevfirst)
3320     set_first_insn (nextlast);
3321   return first;
3322 }
3323 
3324 /* Skip over inter-block insns occurring after BB which are typically
3325    associated with BB (e.g., barriers). If there are any such insns,
3326    we return the last one. Otherwise, we return the end of BB.  */
3327 
static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  /* Never walk into the next block's insns.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* Barriers trail the block; record them as the new end.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  /* Notes are walked over without being recorded in LAST_INSN;
	     the CONTINUEs resume the enclosing insn-walk loop.  */
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label followed by its jump table data belongs with
	     this block; keep both and record the table as the end.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      /* Anything else ends the scan.  */
      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	  case NOTE_INSN_DELETED_DEBUG_LABEL:
	    continue;
	  default:
	    /* Move the note after LAST_INSN, past the trailing barrier.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
3407 
3408 /* Locate or create a label for a given basic block.  */
3409 
3410 static rtx
label_for_bb(basic_block bb)3411 label_for_bb (basic_block bb)
3412 {
3413   rtx label = BB_HEAD (bb);
3414 
3415   if (!LABEL_P (label))
3416     {
3417       if (dump_file)
3418 	fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3419 
3420       label = block_label (bb);
3421     }
3422 
3423   return label;
3424 }
3425 
3426 /* Locate the effective beginning and end of the insn chain for each
3427    block, as defined by skip_insns_after_block above.  */
3428 
static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Find the first basic-block note; everything before it is the
     function header.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  /* Detach any insns preceding the first block into
     cfg_layout_function_header.  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx end;

      /* Insns between the previous block's effective end and this
	 block's head become this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					      PREV_INSN (BB_HEAD (bb)));
      /* Trailing insns associated with the block (e.g. barriers)
	 become its footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever follows the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
3469 
/* Main worker of the into_cfglayout pass: switch the current function
   into cfglayout mode.  Always returns 0 (no extra TODO flags).  */

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
3476 
/* Main worker of the outof_cfglayout pass: leave cfglayout mode,
   keeping the current block order.  Always returns 0.  */

static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  /* Chain the blocks through their aux fields in the current layout
     order; the block preceding the exit block ends the chain
     (its aux stays NULL).  */
  FOR_EACH_BB_FN (bb, cfun)
    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}
3490 
namespace {

/* Pass descriptor for the into_cfglayout RTL pass.  */

const pass_data pass_data_into_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper; delegates the work to into_cfg_layout_mode.  */

class pass_into_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_into_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return into_cfg_layout_mode (); }

}; // class pass_into_cfg_layout_mode

} // anon namespace
3521 
/* Create a fresh instance of the into_cfglayout pass.  The caller
   owns the returned object.  */

rtl_opt_pass *
make_pass_into_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_into_cfg_layout_mode (ctxt);
}
3527 
namespace {

/* Pass descriptor for the outof_cfglayout RTL pass.  */

const pass_data pass_data_outof_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper; delegates the work to outof_cfg_layout_mode.  */

class pass_outof_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_outof_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return outof_cfg_layout_mode (); }

}; // class pass_outof_cfg_layout_mode

} // anon namespace
3558 
/* Create a fresh instance of the outof_cfglayout pass.  The caller
   owns the returned object.  */

rtl_opt_pass *
make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_outof_cfg_layout_mode (ctxt);
}
3564 
3565 
3566 /* Link the basic blocks in the correct order, compacting the basic
3567    block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
3568    function also clears the basic block header and footer fields.
3569 
3570    This function is usually called after a pass (e.g. tracer) finishes
3571    some transformations while in cfglayout mode.  The required sequence
3572    of the basic blocks is in a linked list along the bb->aux field.
3573    This functions re-links the basic block prev_bb and next_bb pointers
3574    accordingly, and it compacts and renumbers the blocks.
3575 
3576    FIXME: This currently works only for RTL, but the only RTL-specific
3577    bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
3578    to GIMPLE a long time ago, but it doesn't relink the basic block
3579    chain.  It could do that (to give better initial RTL) if this function
3580    is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */
3581 
void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
	   NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: rewrite prev_bb/next_bb links to follow
     the aux chain starting after the entry block.  */
  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
3639 
3640 
3641 /* Given a reorder chain, rearrange the code to match.  */
3642 
3643 static void
fixup_reorder_chain(void)3644 fixup_reorder_chain (void)
3645 {
3646   basic_block bb;
3647   rtx insn = NULL;
3648 
3649   if (cfg_layout_function_header)
3650     {
3651       set_first_insn (cfg_layout_function_header);
3652       insn = cfg_layout_function_header;
3653       while (NEXT_INSN (insn))
3654 	insn = NEXT_INSN (insn);
3655     }
3656 
3657   /* First do the bulk reordering -- rechain the blocks without regard to
3658      the needed changes to jumps and labels.  */
3659 
3660   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3661        bb->aux)
3662     {
3663       if (BB_HEADER (bb))
3664 	{
3665 	  if (insn)
3666 	    NEXT_INSN (insn) = BB_HEADER (bb);
3667 	  else
3668 	    set_first_insn (BB_HEADER (bb));
3669 	  PREV_INSN (BB_HEADER (bb)) = insn;
3670 	  insn = BB_HEADER (bb);
3671 	  while (NEXT_INSN (insn))
3672 	    insn = NEXT_INSN (insn);
3673 	}
3674       if (insn)
3675 	NEXT_INSN (insn) = BB_HEAD (bb);
3676       else
3677 	set_first_insn (BB_HEAD (bb));
3678       PREV_INSN (BB_HEAD (bb)) = insn;
3679       insn = BB_END (bb);
3680       if (BB_FOOTER (bb))
3681 	{
3682 	  NEXT_INSN (insn) = BB_FOOTER (bb);
3683 	  PREV_INSN (BB_FOOTER (bb)) = insn;
3684 	  while (NEXT_INSN (insn))
3685 	    insn = NEXT_INSN (insn);
3686 	}
3687     }
3688 
3689   NEXT_INSN (insn) = cfg_layout_function_footer;
3690   if (cfg_layout_function_footer)
3691     PREV_INSN (cfg_layout_function_footer) = insn;
3692 
3693   while (NEXT_INSN (insn))
3694     insn = NEXT_INSN (insn);
3695 
3696   set_last_insn (insn);
3697 #ifdef ENABLE_CHECKING
3698   verify_insn_chain ();
3699 #endif
3700 
3701   /* Now add jumps and labels as needed to match the blocks new
3702      outgoing edges.  */
3703 
3704   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3705        bb->aux)
3706     {
3707       edge e_fall, e_taken, e;
3708       rtx bb_end_insn;
3709       rtx ret_label = NULL_RTX;
3710       basic_block nb;
3711       edge_iterator ei;
3712 
3713       if (EDGE_COUNT (bb->succs) == 0)
3714 	continue;
3715 
3716       /* Find the old fallthru edge, and another non-EH edge for
3717 	 a taken jump.  */
3718       e_taken = e_fall = NULL;
3719 
3720       FOR_EACH_EDGE (e, ei, bb->succs)
3721 	if (e->flags & EDGE_FALLTHRU)
3722 	  e_fall = e;
3723 	else if (! (e->flags & EDGE_EH))
3724 	  e_taken = e;
3725 
3726       bb_end_insn = BB_END (bb);
3727       if (JUMP_P (bb_end_insn))
3728 	{
3729 	  ret_label = JUMP_LABEL (bb_end_insn);
3730 	  if (any_condjump_p (bb_end_insn))
3731 	    {
3732 	      /* This might happen if the conditional jump has side
3733 		 effects and could therefore not be optimized away.
3734 		 Make the basic block to end with a barrier in order
3735 		 to prevent rtl_verify_flow_info from complaining.  */
3736 	      if (!e_fall)
3737 		{
3738 		  gcc_assert (!onlyjump_p (bb_end_insn)
3739 			      || returnjump_p (bb_end_insn)
3740                               || (e_taken->flags & EDGE_CROSSING));
3741 		  emit_barrier_after (bb_end_insn);
3742 		  continue;
3743 		}
3744 
3745 	      /* If the old fallthru is still next, nothing to do.  */
3746 	      if (bb->aux == e_fall->dest
3747 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3748 		continue;
3749 
3750 	      /* The degenerated case of conditional jump jumping to the next
3751 		 instruction can happen for jumps with side effects.  We need
3752 		 to construct a forwarder block and this will be done just
3753 		 fine by force_nonfallthru below.  */
3754 	      if (!e_taken)
3755 		;
3756 
3757 	      /* There is another special case: if *neither* block is next,
3758 		 such as happens at the very end of a function, then we'll
3759 		 need to add a new unconditional jump.  Choose the taken
3760 		 edge based on known or assumed probability.  */
3761 	      else if (bb->aux != e_taken->dest)
3762 		{
3763 		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
3764 
3765 		  if (note
3766 		      && XINT (note, 0) < REG_BR_PROB_BASE / 2
3767 		      && invert_jump (bb_end_insn,
3768 				      (e_fall->dest
3769 				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
3770 				       ? NULL_RTX
3771 				       : label_for_bb (e_fall->dest)), 0))
3772 		    {
3773 		      e_fall->flags &= ~EDGE_FALLTHRU;
3774 		      gcc_checking_assert (could_fall_through
3775 					   (e_taken->src, e_taken->dest));
3776 		      e_taken->flags |= EDGE_FALLTHRU;
3777 		      update_br_prob_note (bb);
3778 		      e = e_fall, e_fall = e_taken, e_taken = e;
3779 		    }
3780 		}
3781 
3782 	      /* If the "jumping" edge is a crossing edge, and the fall
3783 		 through edge is non-crossing, leave things as they are.  */
3784 	      else if ((e_taken->flags & EDGE_CROSSING)
3785 		       && !(e_fall->flags & EDGE_CROSSING))
3786 		continue;
3787 
3788 	      /* Otherwise we can try to invert the jump.  This will
3789 		 basically never fail, however, keep up the pretense.  */
3790 	      else if (invert_jump (bb_end_insn,
3791 				    (e_fall->dest
3792 				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
3793 				     ? NULL_RTX
3794 				     : label_for_bb (e_fall->dest)), 0))
3795 		{
3796 		  e_fall->flags &= ~EDGE_FALLTHRU;
3797 		  gcc_checking_assert (could_fall_through
3798 				       (e_taken->src, e_taken->dest));
3799 		  e_taken->flags |= EDGE_FALLTHRU;
3800 		  update_br_prob_note (bb);
3801 		  if (LABEL_NUSES (ret_label) == 0
3802 		      && single_pred_p (e_taken->dest))
3803 		    delete_insn (ret_label);
3804 		  continue;
3805 		}
3806 	    }
3807 	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3808 	    {
3809 	      /* If the old fallthru is still next or if
3810 		 asm goto doesn't have a fallthru (e.g. when followed by
3811 		 __builtin_unreachable ()), nothing to do.  */
3812 	      if (! e_fall
3813 		  || bb->aux == e_fall->dest
3814 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3815 		continue;
3816 
3817 	      /* Otherwise we'll have to use the fallthru fixup below.  */
3818 	    }
3819 	  else
3820 	    {
3821 	      /* Otherwise we have some return, switch or computed
3822 		 jump.  In the 99% case, there should not have been a
3823 		 fallthru edge.  */
3824 	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3825 	      continue;
3826 	    }
3827 	}
3828       else
3829 	{
3830 	  /* No fallthru implies a noreturn function with EH edges, or
3831 	     something similarly bizarre.  In any case, we don't need to
3832 	     do anything.  */
3833 	  if (! e_fall)
3834 	    continue;
3835 
3836 	  /* If the fallthru block is still next, nothing to do.  */
3837 	  if (bb->aux == e_fall->dest)
3838 	    continue;
3839 
3840 	  /* A fallthru to exit block.  */
3841 	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3842 	    continue;
3843 	}
3844 
3845       /* We got here if we need to add a new jump insn.
3846 	 Note force_nonfallthru can delete E_FALL and thus we have to
3847 	 save E_FALL->src prior to the call to force_nonfallthru.  */
3848       nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3849       if (nb)
3850 	{
3851 	  nb->aux = bb->aux;
3852 	  bb->aux = nb;
3853 	  /* Don't process this new block.  */
3854 	  bb = nb;
3855 	}
3856     }
3857 
3858   relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3859 
3860   /* Annoying special case - jump around dead jumptables left in the code.  */
3861   FOR_EACH_BB_FN (bb, cfun)
3862     {
3863       edge e = find_fallthru_edge (bb->succs);
3864 
3865       if (e && !can_fallthru (e->src, e->dest))
3866 	force_nonfallthru (e);
3867     }
3868 
3869   /* Ensure goto_locus from edges has some instructions with that locus
3870      in RTL.  */
3871   if (!optimize)
3872     FOR_EACH_BB_FN (bb, cfun)
3873       {
3874         edge e;
3875         edge_iterator ei;
3876 
3877         FOR_EACH_EDGE (e, ei, bb->succs)
3878 	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
3879 	      && !(e->flags & EDGE_ABNORMAL))
3880 	    {
3881 	      edge e2;
3882 	      edge_iterator ei2;
3883 	      basic_block dest, nb;
3884 	      rtx end;
3885 
3886 	      insn = BB_END (e->src);
3887 	      end = PREV_INSN (BB_HEAD (e->src));
3888 	      while (insn != end
3889 		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
3890 		insn = PREV_INSN (insn);
3891 	      if (insn != end
3892 		  && INSN_LOCATION (insn) == e->goto_locus)
3893 		continue;
3894 	      if (simplejump_p (BB_END (e->src))
3895 		  && !INSN_HAS_LOCATION (BB_END (e->src)))
3896 		{
3897 		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
3898 		  continue;
3899 		}
3900 	      dest = e->dest;
3901 	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3902 		{
3903 		  /* Non-fallthru edges to the exit block cannot be split.  */
3904 		  if (!(e->flags & EDGE_FALLTHRU))
3905 		    continue;
3906 		}
3907 	      else
3908 		{
3909 		  insn = BB_HEAD (dest);
3910 		  end = NEXT_INSN (BB_END (dest));
3911 		  while (insn != end && !NONDEBUG_INSN_P (insn))
3912 		    insn = NEXT_INSN (insn);
3913 		  if (insn != end && INSN_HAS_LOCATION (insn)
3914 		      && INSN_LOCATION (insn) == e->goto_locus)
3915 		    continue;
3916 		}
3917 	      nb = split_edge (e);
3918 	      if (!INSN_P (BB_END (nb)))
3919 		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
3920 						     nb);
3921 	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;
3922 
3923 	      /* If there are other incoming edges to the destination block
3924 		 with the same goto locus, redirect them to the new block as
3925 		 well, this can prevent other such blocks from being created
3926 		 in subsequent iterations of the loop.  */
3927 	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
3928 		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
3929 		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
3930 		    && e->goto_locus == e2->goto_locus)
3931 		  redirect_edge_and_branch (e2, nb);
3932 		else
3933 		  ei_next (&ei2);
3934 	    }
3935       }
3936 }
3937 
3938 /* Perform sanity checks on the insn chain.
3939    1. Check that next/prev pointers are consistent in both the forward and
3940       reverse direction.
3941    2. Count insns in chain, going both directions, and check if equal.
3942    3. Check that get_last_insn () returns the actual end of chain.  */
3943 
3944 DEBUG_FUNCTION void
verify_insn_chain(void)3945 verify_insn_chain (void)
3946 {
3947   rtx x, prevx, nextx;
3948   int insn_cnt1, insn_cnt2;
3949 
3950   for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3951        x != 0;
3952        prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3953     gcc_assert (PREV_INSN (x) == prevx);
3954 
3955   gcc_assert (prevx == get_last_insn ());
3956 
3957   for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3958        x != 0;
3959        nextx = x, insn_cnt2++, x = PREV_INSN (x))
3960     gcc_assert (NEXT_INSN (x) == nextx);
3961 
3962   gcc_assert (insn_cnt1 == insn_cnt2);
3963 }
3964 
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  /* Find the fallthru predecessor of the exit block, if any.  */
  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  /* BB->aux links the blocks in their intended output order.  If BB has
     a successor in that chain, it is not last yet and must be moved to
     the end.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Find BB's predecessor in the aux chain and unlink BB ...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ... advance to the current last block of the chain ...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ... and re-append BB there so it becomes the last block.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
4009 
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* Look for a second fallthru predecessor of the exit block.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  /* Zero or one fallthru predecessor: nothing to fix.  */
  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  /* Redirect every other fallthru predecessor into the forwarder.  The
     iterator is advanced manually because redirection removes the edge
     from the exit block's predecessor list.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  The block with a NULL aux link was the old last block;
     chain FORWARDER after it.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
4060 
4061 /* Return true in case it is possible to duplicate the basic block BB.  */
4062 
4063 static bool
cfg_layout_can_duplicate_bb_p(const_basic_block bb)4064 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4065 {
4066   /* Do not attempt to duplicate tablejumps, as we need to unshare
4067      the dispatch table.  This is difficult to do, as the instructions
4068      computing jump destination may be hoisted outside the basic block.  */
4069   if (tablejump_p (BB_END (bb), NULL, NULL))
4070     return false;
4071 
4072   /* Do not duplicate blocks containing insns that can't be copied.  */
4073   if (targetm.cannot_copy_insn_p)
4074     {
4075       rtx insn = BB_HEAD (bb);
4076       while (1)
4077 	{
4078 	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4079 	    return false;
4080 	  if (insn == BB_END (bb))
4081 	    break;
4082 	  insn = NEXT_INSN (insn);
4083 	}
4084     }
4085 
4086   return true;
4087 }
4088 
/* Create a copy of the insn chain FROM ... TO, inclusive, at the end of
   the current insn stream, and return the first insn of the copy.  The
   copied chain will be moved into place by the caller.  */

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, next, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  /* Preserve JUMP_LABEL on (simple) return jumps; return rtxes
	     are not ordinary labels, so the copy would otherwise lose
	     the annotation.  */
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
          maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case JUMP_TABLE_DATA:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.
	     Avoid copying following barrier as well if any
	     (and debug insns in between).  */
	  for (next = NEXT_INSN (insn);
	       next != NEXT_INSN (to);
	       next = NEXT_INSN (next))
	    if (!DEBUG_INSN_P (next))
	      break;
	  if (next != NEXT_INSN (to) && BARRIER_P (next))
	    insn = next;
	  break;

	case CODE_LABEL:
	  /* Labels are deliberately not duplicated.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
              /* We should only switch text sections once.  */
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* Drop the placeholder note and return the first real copied insn.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
4176 
/* Create a duplicate of the basic block BB.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  /* Copy BB's insns to the end of the insn stream and wrap the copy in
     a new block placed just before the exit block.  */
  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* The copy stays in the same hot/cold partition as the original.  */
  BB_COPY_PARTITION (new_bb, bb);
  /* Duplicate the out-of-block header insn chain, if present.  */
  if (BB_HEADER (bb))
    {
      /* Find the last header insn to bound the chain copy.  */
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  /* Likewise for the footer insn chain.  */
  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
4213 
4214 
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx x;
  basic_block bb;

  /* Once bb reordering is complete, cfg layout mode should not be re-entered.
     Entering cfg layout mode will perform optimizations on the cfg that
     could affect the bb layout negatively or even require fixups. An
     example of the latter is if edge forwarding performed when optimizing
     the cfg layout required moving a block from the hot to the cold section
     under -freorder-blocks-and-partition. This would create an illegal
     partitioning unless some manual fixup was performed.  */
  gcc_assert (!crtl->bb_reorder_complete);

  initialize_original_copy_tables ();

  /* Switch the CFG hooks to the cfglayout-mode variants.  */
  cfg_layout_rtl_register_cfg_hooks ();

  /* Remember where headers/footers of blocks currently are in the
     insn chain.  */
  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
    {
      bb = BLOCK_FOR_INSN (XEXP (x, 0));
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
4250 
4251 /* Splits superblocks.  */
4252 void
break_superblocks(void)4253 break_superblocks (void)
4254 {
4255   sbitmap superblocks;
4256   bool need = false;
4257   basic_block bb;
4258 
4259   superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
4260   bitmap_clear (superblocks);
4261 
4262   FOR_EACH_BB_FN (bb, cfun)
4263     if (bb->flags & BB_SUPERBLOCK)
4264       {
4265 	bb->flags &= ~BB_SUPERBLOCK;
4266 	bitmap_set_bit (superblocks, bb->index);
4267 	need = true;
4268       }
4269 
4270   if (need)
4271     {
4272       rebuild_jump_labels (get_insns ());
4273       find_many_sub_basic_blocks (superblocks);
4274     }
4275 
4276   free (superblocks);
4277 }
4278 
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Make sure the exit block has at most one fallthru predecessor.  */
  force_one_exit_fallthru ();
  /* Leave cfglayout mode: switch back to the plain RTL CFG hooks.  */
  rtl_register_cfg_hooks ();
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  /* Reorder the insn chain to match the block order given by the
     bb->aux links, inserting jumps where fallthrus no longer work.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
4306 
4307 
4308 /* Same as split_block but update cfg_layout structures.  */
4309 
4310 static basic_block
cfg_layout_split_block(basic_block bb,void * insnp)4311 cfg_layout_split_block (basic_block bb, void *insnp)
4312 {
4313   rtx insn = (rtx) insnp;
4314   basic_block new_bb = rtl_split_block (bb, insn);
4315 
4316   BB_FOOTER (new_bb) = BB_FOOTER (bb);
4317   BB_FOOTER (bb) = NULL;
4318 
4319   return new_bb;
4320 }
4321 
/* Redirect edge E to DEST.  Returns the redirected edge on success, or
   NULL when the redirection is not possible (abnormal call / EH edges).  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  /* Abnormal-call and EH edges cannot be redirected.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Already pointing at DEST: nothing to do.  */
  if (e->dest == dest)
    return e;

  /* First try to simply replace the jump at the end of SRC.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* The fallthru edge out of the entry block carries no jump insn, so
     it can be retargeted directly.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  /* Temporarily treat E as the branch edge so the jump insn is
	     retargeted, then restore the fallthru flag on the result.  */
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  redirected->flags |= EDGE_FALLTHRU;
	  df_set_bb_dirty (redirected->src);
	  return redirected;
	}
      /* In case we are redirecting fallthru edge to the branch edge
	 of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      if (dump_file)
	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
		 e->src->index, e->dest->index, dest->index);
      /* Retarget the fallthru edge, merging with a pre-existing edge to
	 DEST if there is one.  */
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
4404 
/* Simple wrapper as we always can redirect fallthru edges.  */
static basic_block
cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);

  /* In cfglayout mode redirection is expected to succeed; no new block
     is ever created, hence the NULL return.  */
  gcc_assert (redirected);
  return NULL;
}
4414 
/* Same as delete_basic_block but update cfg_layout structures.
   Splices BB's header/footer insns back into the main insn chain before
   deleting the block, then moves any leftover insns (e.g. dead jump
   tables) to the next block's header or to the function footer.  */

static void
cfg_layout_delete_block (basic_block bb)
{
  /* REMAINTS (sic) collects the insns that survive the deletion.  */
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;

  /* Splice the header chain into the insn stream just before BB_HEAD.  */
  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
	NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      /* Unlink barriers from the footer (they are re-created as needed),
	 stopping at the first label.  */
      insn = BB_FOOTER (bb);
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      /* Splice the remaining footer insns after BB_END.  */
      if (BB_FOOTER (bb))
	{
	  insn = BB_END (bb);
	  NEXT_INSN (insn) = BB_FOOTER (bb);
	  PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  NEXT_INSN (insn) = next;
	  if (next)
	    PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  /* Decide where the surviving insns go: the next block's header, or
     the function footer when BB was the last block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  /* Recompute the boundaries of whatever insns are left in BB's old
     position in the chain.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  /* If any insns survived, move them to the chosen header/footer.  */
  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
	PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
4497 
4498 /* Return true when blocks A and B can be safely merged.  */
4499 
4500 static bool
cfg_layout_can_merge_blocks_p(basic_block a,basic_block b)4501 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4502 {
4503   /* If we are partitioning hot/cold basic blocks, we don't want to
4504      mess up unconditional or indirect jumps that cross between hot
4505      and cold sections.
4506 
4507      Basic block partitioning may result in some jumps that appear to
4508      be optimizable (or blocks that appear to be mergeable), but which really
4509      must be left untouched (they are required to make it safely across
4510      partition boundaries).  See  the comments at the top of
4511      bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
4512 
4513   if (BB_PARTITION (a) != BB_PARTITION (b))
4514     return false;
4515 
4516   /* Protect the loop latches.  */
4517   if (current_loops && b->loop_father->latch == b)
4518     return false;
4519 
4520   /* If we would end up moving B's instructions, make sure it doesn't fall
4521      through into the exit block, since we cannot recover from a fallthrough
4522      edge into the exit block occurring in the middle of a function.  */
4523   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4524     {
4525       edge e = find_fallthru_edge (b->succs);
4526       if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4527 	return false;
4528     }
4529 
4530   /* There must be exactly one edge in between the blocks.  */
4531   return (single_succ_p (a)
4532 	  && single_succ (a) == b
4533 	  && single_pred_p (b) == 1
4534 	  && a != b
4535 	  /* Must be simple edge.  */
4536 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4537 	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4538 	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4539 	  /* If the jump insn has side effects, we can't kill the edge.
4540 	     When not optimizing, try_redirect_by_replacing_jump will
4541 	     not allow us to redirect an edge by replacing a table jump.  */
4542 	  && (!JUMP_P (BB_END (a))
4543 	      || ((!optimize || reload_completed)
4544 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4545 }
4546 
/* Merge block A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
			 a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  /* Append B's footer chain to the end of A's.  */
	  rtx last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = BB_FOOTER (b);
	  PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
   if (BB_HEADER (b))
     {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  /* Chain B's header in front of A's footer.  */
	  rtx last = BB_HEADER (b);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = BB_FOOTER (a);
	  PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  /* B's insns now belong to A; delete B's basic-block note.  */
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
4650 
/* Split edge E.  */

static basic_block
cfg_layout_split_edge (edge e)
{
  /* Create the new empty block after E's source block (or at the very
     start of the insn chain when the source is the entry block).  */
  basic_block new_bb =
    create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			? NEXT_INSN (BB_END (e->src)) : get_insns (),
			NULL_RTX, e->src);

  /* Pick the hot/cold partition from the destination, falling back to
     the source when the destination is the exit block.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    BB_COPY_PARTITION (new_bb, e->src);
  else
    BB_COPY_PARTITION (new_bb, e->dest);
  /* Wire the new block between E's endpoints.  */
  make_edge (new_bb, e->dest, EDGE_FALLTHRU);
  redirect_edge_and_branch_force (e, new_bb);

  return new_bb;
}
4670 
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
  /* Nothing to do at the RTL level.  */
}
4677 
4678 /* Return true if BB contains only labels or non-executable
4679    instructions.  */
4680 
4681 static bool
rtl_block_empty_p(basic_block bb)4682 rtl_block_empty_p (basic_block bb)
4683 {
4684   rtx insn;
4685 
4686   if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4687       || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4688     return true;
4689 
4690   FOR_BB_INSNS (bb, insn)
4691     if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4692       return false;
4693 
4694   return true;
4695 }
4696 
4697 /* Split a basic block if it ends with a conditional branch and if
4698    the other part of the block is not empty.  */
4699 
4700 static basic_block
rtl_split_block_before_cond_jump(basic_block bb)4701 rtl_split_block_before_cond_jump (basic_block bb)
4702 {
4703   rtx insn;
4704   rtx split_point = NULL;
4705   rtx last = NULL;
4706   bool found_code = false;
4707 
4708   FOR_BB_INSNS (bb, insn)
4709     {
4710       if (any_condjump_p (insn))
4711 	split_point = last;
4712       else if (NONDEBUG_INSN_P (insn))
4713 	found_code = true;
4714       last = insn;
4715     }
4716 
4717   /* Did not find everything.  */
4718   if (found_code && split_point)
4719     return split_block (bb, split_point)->dest;
4720   else
4721     return NULL;
4722 }
4723 
4724 /* Return 1 if BB ends with a call, possibly followed by some
4725    instructions that must stay with the call, 0 otherwise.  */
4726 
4727 static bool
rtl_block_ends_with_call_p(basic_block bb)4728 rtl_block_ends_with_call_p (basic_block bb)
4729 {
4730   rtx insn = BB_END (bb);
4731 
4732   while (!CALL_P (insn)
4733 	 && insn != BB_HEAD (bb)
4734 	 && (keep_with_call_p (insn)
4735 	     || NOTE_P (insn)
4736 	     || DEBUG_INSN_P (insn)))
4737     insn = PREV_INSN (insn);
4738   return (CALL_P (insn));
4739 }
4740 
/* Return 1 if BB ends with a conditional branch, 0 otherwise.  */

static bool
rtl_block_ends_with_condjump_p (const_basic_block bb)
{
  /* Simply test the last insn of the block.  */
  return any_condjump_p (BB_END (bb));
}
4748 
4749 /* Return true if we need to add fake edge to exit.
4750    Helper function for rtl_flow_call_edges_add.  */
4751 
4752 static bool
need_fake_edge_p(const_rtx insn)4753 need_fake_edge_p (const_rtx insn)
4754 {
4755   if (!INSN_P (insn))
4756     return false;
4757 
4758   if ((CALL_P (insn)
4759        && !SIBLING_CALL_P (insn)
4760        && !find_reg_note (insn, REG_NORETURN, NULL)
4761        && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4762     return true;
4763 
4764   return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4765 	   && MEM_VOLATILE_P (PATTERN (insn)))
4766 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
4767 	      && asm_noperands (insn) != -1
4768 	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4769 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4770 }
4771 
4772 /* Add fake edges to the function exit for any non constant and non noreturn
4773    calls, volatile inline assembly in the bitmap of blocks specified by
4774    BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
4775    that were split.
4776 
4777    The goal is to expose cases in which entering a basic block does not imply
4778    that all subsequent instructions must be executed.  */
4779 
static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do when the function consists only of the fixed
     entry/exit blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  /* With no block set supplied, the whole CFG is processed, so the
     last real block must always receive the special treatment below;
     otherwise only if it is a member of BLOCKS.  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Inserting a USE of const0_rtx on the edge to EXIT forces a
	     new block, so the call's fake edge and the fallthru edge
	     cannot be folded together by make_edge.  */
	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx insn;
      rtx prev_insn;

      /* The block array may contain holes for removed blocks.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block's insns backwards.  PREV_INSN is captured before
	 any splitting below so the walk continues correctly even after
	 INSN becomes the end of a new block.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
4898 
/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target and SECOND_HEAD the fall-thru block;
   there is no need to handle the latter here, as the loop versioning
   code takes care of it.  SECOND_HEAD is a parameter because it is
   needed for the condition in trees, and this function must have the
   same type since it is a hook.  */
4904 static void
rtl_lv_add_condition_to_bb(basic_block first_head,basic_block second_head ATTRIBUTE_UNUSED,basic_block cond_bb,void * comp_rtx)4905 rtl_lv_add_condition_to_bb (basic_block first_head ,
4906 			    basic_block second_head ATTRIBUTE_UNUSED,
4907 			    basic_block cond_bb, void *comp_rtx)
4908 {
4909   rtx label, seq, jump;
4910   rtx op0 = XEXP ((rtx)comp_rtx, 0);
4911   rtx op1 = XEXP ((rtx)comp_rtx, 1);
4912   enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
4913   enum machine_mode mode;
4914 
4915 
4916   label = block_label (first_head);
4917   mode = GET_MODE (op0);
4918   if (mode == VOIDmode)
4919     mode = GET_MODE (op1);
4920 
4921   start_sequence ();
4922   op0 = force_operand (op0, NULL_RTX);
4923   op1 = force_operand (op1, NULL_RTX);
4924   do_compare_rtx_and_jump (op0, op1, comp, 0,
4925 			   mode, NULL_RTX, NULL_RTX, label, -1);
4926   jump = get_last_insn ();
4927   JUMP_LABEL (jump) = label;
4928   LABEL_NUSES (label)++;
4929   seq = get_insns ();
4930   end_sequence ();
4931 
4932   /* Add the new cond , in the new head.  */
4933   emit_insn_after (seq, BB_END (cond_bb));
4934 }
4935 
4936 
/* Given a block B with a conditional branch at its end, store the
   branch edge and the fall-thru edge in BRANCH_EDGE and FALLTHRU_EDGE
   respectively.  */
4940 static void
rtl_extract_cond_bb_edges(basic_block b,edge * branch_edge,edge * fallthru_edge)4941 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4942 			   edge *fallthru_edge)
4943 {
4944   edge e = EDGE_SUCC (b, 0);
4945 
4946   if (e->flags & EDGE_FALLTHRU)
4947     {
4948       *fallthru_edge = e;
4949       *branch_edge = EDGE_SUCC (b, 1);
4950     }
4951   else
4952     {
4953       *branch_edge = e;
4954       *fallthru_edge = EDGE_SUCC (b, 1);
4955     }
4956 }
4957 
4958 void
init_rtl_bb_info(basic_block bb)4959 init_rtl_bb_info (basic_block bb)
4960 {
4961   gcc_assert (!bb->il.x.rtl);
4962   bb->il.x.head_ = NULL;
4963   bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
4964 }
4965 
4966 /* Returns true if it is possible to remove edge E by redirecting
4967    it to the destination of the other edge from E->src.  */
4968 
4969 static bool
rtl_can_remove_branch_p(const_edge e)4970 rtl_can_remove_branch_p (const_edge e)
4971 {
4972   const_basic_block src = e->src;
4973   const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
4974   const_rtx insn = BB_END (src), set;
4975 
4976   /* The conditions are taken from try_redirect_by_replacing_jump.  */
4977   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
4978     return false;
4979 
4980   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4981     return false;
4982 
4983   if (BB_PARTITION (src) != BB_PARTITION (target))
4984     return false;
4985 
4986   if (!onlyjump_p (insn)
4987       || tablejump_p (insn, NULL, NULL))
4988     return false;
4989 
4990   set = single_set (insn);
4991   if (!set || side_effects_p (set))
4992     return false;
4993 
4994   return true;
4995 }
4996 
4997 static basic_block
rtl_duplicate_bb(basic_block bb)4998 rtl_duplicate_bb (basic_block bb)
4999 {
5000   bb = cfg_layout_duplicate_bb (bb);
5001   bb->aux = NULL;
5002   return bb;
5003 }
5004 
5005 /* Do book-keeping of basic block BB for the profile consistency checker.
5006    If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5007    then do post-pass accounting.  Store the counting in RECORD.  */
5008 static void
rtl_account_profile_record(basic_block bb,int after_pass,struct profile_record * record)5009 rtl_account_profile_record (basic_block bb, int after_pass,
5010 			    struct profile_record *record)
5011 {
5012   rtx insn;
5013   FOR_BB_INSNS (bb, insn)
5014     if (INSN_P (insn))
5015       {
5016 	record->size[after_pass]
5017 	  += insn_rtx_cost (PATTERN (insn), false);
5018 	if (profile_status_for_fn (cfun) == PROFILE_READ)
5019 	  record->time[after_pass]
5020 	    += insn_rtx_cost (PATTERN (insn), true) * bb->count;
5021 	else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
5022 	  record->time[after_pass]
5023 	    += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
5024       }
5025 }
5026 
5027 /* Implementation of CFG manipulation for linearized RTL.  */
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info, /* verify_flow_info */
  rtl_dump_bb, /* dump_bb */
  rtl_dump_bb_for_graph, /* dump_bb_for_graph */
  rtl_create_basic_block, /* create_basic_block */
  rtl_redirect_edge_and_branch, /* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p, /* can_remove_branch_p */
  rtl_delete_block, /* delete_basic_block */
  rtl_split_block, /* split_block */
  rtl_move_block_after, /* move_block_after */
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks, /* merge_blocks */
  rtl_predict_edge, /* predict_edge */
  rtl_predicted_by_p, /* predicted_by_p */
  cfg_layout_can_duplicate_bb_p, /* can_duplicate_block_p */
  rtl_duplicate_bb, /* duplicate_block */
  rtl_split_edge, /* split_edge */
  rtl_make_forwarder_block, /* make_forwarder_block */
  rtl_tidy_fallthru_edge, /* tidy_fallthru_edge */
  rtl_force_nonfallthru, /* force_nonfallthru */
  rtl_block_ends_with_call_p, /* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  rtl_flow_call_edges_add, /* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
5064 
5065 /* Implementation of CFG manipulation for cfg layout RTL, where
5066    basic block connected via fallthru edges does not have to be adjacent.
5067    This representation will hopefully become the default one in future
5068    version of the compiler.  */
5069 
struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1, /* verify_flow_info */
  rtl_dump_bb, /* dump_bb */
  rtl_dump_bb_for_graph, /* dump_bb_for_graph */
  cfg_layout_create_basic_block, /* create_basic_block */
  cfg_layout_redirect_edge_and_branch, /* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p, /* can_remove_branch_p */
  cfg_layout_delete_block, /* delete_basic_block */
  cfg_layout_split_block, /* split_block */
  rtl_move_block_after, /* move_block_after */
  cfg_layout_can_merge_blocks_p, /* can_merge_blocks_p */
  cfg_layout_merge_blocks, /* merge_blocks */
  rtl_predict_edge, /* predict_edge */
  rtl_predicted_by_p, /* predicted_by_p */
  cfg_layout_can_duplicate_bb_p, /* can_duplicate_block_p */
  cfg_layout_duplicate_bb, /* duplicate_block */
  cfg_layout_split_edge, /* split_edge */
  rtl_make_forwarder_block, /* make_forwarder_block */
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru, /* force_nonfallthru */
  rtl_block_ends_with_call_p, /* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  rtl_flow_call_edges_add, /* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
5106 
5107 #include "gt-cfgrtl.h"
5108