/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low-level functions to manipulate and analyze the
   CFG that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not intended for generic use:
     - Infrastructure to determine quickly the basic block for an insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
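
/* For orientation, the edge insertion API above is typically used as
   follows (an illustrative sketch only, not code from this file; SEQ,
   DEST, SRC and E are placeholders, and emit_insn_at_entry below is a
   real in-tree user):

       start_sequence ();
       emit_move_insn (dest, src);
       seq = get_insns ();
       end_sequence ();
       insert_insn_on_edge (seq, e);
       ...
       commit_edge_insertions ();

   insert_insn_on_edge only queues SEQ on the edge; nothing appears in
   the insn stream until commit_edge_insertions is called, which may
   split edges and create new basic blocks in the process.  */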

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-attr.h"
#include "insn-config.h"
#include "expr.h"
#include "target.h"
#include "common/common-target.h"
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"
#include "df.h"

/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx cfg_layout_function_footer;
static GTY(()) rtx cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static int can_delete_note_p (const_rtx);
static int can_delete_label_p (const_rtx);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, int);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const_rtx note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (const_rtx label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !in_expr_list_p (forced_labels, label));
}

/* Delete INSN by patching it out.  */

void
delete_insn (rtx insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be referenced via variables, the constant pool, etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}

/* Like delete_insn but also purge dead edges from BB.  */

void
delete_insn_and_edges (rtx insn)
{
  bool purge = false;

  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    purge = true;
  delete_insn (insn);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (insn));
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set the bb field to
   NULL for insns that cannot be removed.  */

void
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
{
  rtx prev, current;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
  current = finish;
  while (1)
    {
      prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (current))
	;
      else
	delete_insn (current);

      if (clear_bb && !INSN_DELETED_P (current))
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}

/* Create a new basic block consisting of the instructions between HEAD
   and END inclusive.  This function is designed to allow fast BB
   construction - it reuses the note and basic block struct in BB_NOTE,
   if any, does not grow the BASIC_BLOCK chain, and should be used
   directly only by CFG construction code.  END can be NULL to create a
   new empty basic block before HEAD.  Both END and HEAD can be NULL to
   create a basic block at the end of the INSN chain.  AFTER is the
   basic block the new block should be placed after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK (bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
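
/* To illustrate the argument conventions above (a sketch only; HEAD, END
   and AFTER are assumed to be in scope):

     create_basic_block_structure (head, end, NULL, after)
       - new block containing the existing insns HEAD..END inclusive;
     create_basic_block_structure (head, NULL, NULL, after)
       - new empty block placed before HEAD;
     create_basic_block_structure (NULL, NULL, NULL, after)
       - new empty block at the end of the insn chain.

   In every case a NOTE_INSN_BASIC_BLOCK is reused or emitted, so the
   resulting block always contains its basic block note.  */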

/* Create a new basic block consisting of the instructions between HEAD
   and END and place it in the BB chain after block AFTER.  END can be
   NULL to create a new empty basic block before HEAD.  Both END and
   HEAD can be NULL to create a basic block at the end of the INSN
   chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx head = (rtx) headp, end = (rtx) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block >= basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  n_basic_blocks++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx insn, end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);


  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end = BB_END (bb);
      rtx insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  free_bb_for_insn ();
  return 0;
}

struct rtl_opt_pass pass_free_cfg =
{
 {
  RTL_PASS,
  "*free_cfg",                          /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  rest_of_pass_free_cfg,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfg,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};

/* Return the RTX after which to emit code when we want to emit code at
   the entry of the function.  */
rtx
entry_of_function (void)
{
  return (n_basic_blocks > NUM_FIXED_BLOCKS ?
	  BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

/* Update BLOCK_FOR_INSN of the insns between BEGIN and END (skipping
   any BARRIERs) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
{
  rtx insn;

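  /* Advance END one past the last insn so that the loop below can treat
     the inclusive range BEGIN..END as a half-open one.  */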
  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}


/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (const_rtx insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx insn;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !single_succ_p (bb))
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
	return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

  return true;
}
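
/* For example, a forwarder block in RTL typically looks like (schematic
   only):

     L1:				;; optional CODE_LABEL
	NOTE_INSN_BASIC_BLOCK
	(jump_insn (set (pc) (label_ref L2)))

   i.e. nothing but an optional label, the block note, possibly debug
   insns, and a simple unconditional jump to the single successor.  */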

/* Return nonzero if we can reach TARGET from SRC by falling through.  */
/* FIXME: Make this a cfg hook.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx insn = BB_END (src);
  rtx insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR)
    return true;
  if (src->next_bb != target)
    return false;
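  /* If SRC already falls through to the exit block, it cannot also be
     made to fall through to TARGET.  */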
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (insn2 && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}

/* Return nonzero if we could reach TARGET from SRC by falling through,
   if the target was made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR)
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR
	&& e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}

/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
rtx
bb_note (basic_block bb)
{
  rtx note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return note;
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx
first_insn_after_basic_block_note (basic_block block)
{
  rtx insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL_RTX;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Create a new basic block just after basic block BB by splitting
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = (rtx) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check the type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
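  /* If we are splitting at the very end of BB, emit a placeholder note
     so that the new block created below starts with a valid insn.  */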
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}

/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx insn, end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  rtx b_debug_start = b_end, b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

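  /* Keep any trailing debug insns of B out of the merge proper: remember
     them as the range B_DEBUG_START..B_DEBUG_END and back B_END up in
     front of them, so they can be reattached to A afterwards.  */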
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL_RTX;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}


/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Return the label in the head of basic block BLOCK.  Create one if it
   doesn't exist.  */

rtx
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR)
    return NULL_RTX;

  if (!LABEL_P (BB_HEAD (block)))
    {
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
    }

  return BB_HEAD (block);
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block.  The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
      || BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing a branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink the whole insn chain.  */
      if (in_cfglayout)
	{
	  rtx insn = BB_FOOTER (src);

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  if (PREV_INSN (insn))
		    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    BB_FOOTER (src) = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is a simple jump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR);
	  return NULL;
	}
    }

  /* Cannot do anything for a target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return NULL;

  /* Otherwise replace the possibly complicated jump insn by a simple
     jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before the barrier so that the notes
		 which originally were or were created before the jump
		 table are inside the basic block.  */
	      rtx new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
				        PREV_INSN (barrier), src);

	      NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      NEXT_INSN (new_insn) = barrier;
	      NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      PREV_INSN (new_insn) = PREV_INSN (barrier);
	      PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this
   only when it originally reached OLD_LABEL.  Return true if this
   worked or if the original target wasn't OLD_LABEL; return false if
   redirection is not possible.  */

static bool
patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
{
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR)
	return false;
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR)
	return false;
      new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ??? We may play games with moving the named labels from
	 one basic block to the other in case only one computed jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     the target is the exit block on some arches.  */
	  if (!redirect_jump (insn, block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR);
	      return false;
	    }
	}
    }
  return true;
}


/* Redirect the edge representing a branch of an (un)conditional jump or
   tablejump; return NULL on failure.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx insn = BB_END (src);

  /* We can only redirect non-fallthru edges of a jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (insn, old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do that at
   the expense of adding new instructions or reordering basic blocks.

   The function can also be called with an edge destination equal to
   TARGET.  Then it should try the simplifications and do nothing if
   none is possible.

   Return the edge representing the branch if the transformation
   succeeded.  Return NULL on failure.
   We still return NULL if E already pointed to TARGET and we did not
   manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  return ret;
}

/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK; it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is a conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect the fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = INTVAL (XEXP (note, 0));

	  b->probability = prob;
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  e->probability -= b->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as the
	 abnormal edge.
	 We can't redirect an abnormal edge, but we can still split the
	 fallthru one and create a separate abnormal edge to the original
	 destination.  This allows bb-reorder to make such an edge
	 non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR)
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with an asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  rtx insn = BB_END (e->src), note;
	  rtx old_label = BB_HEAD (e->dest);
	  rtx new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      if (!tablejump_p (BB_END (e->src), NULL, &note))
	note = BB_END (e->src);
      note = NEXT_INSN (note);

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure the new block ends up in the correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);
      if (flag_reorder_blocks_and_partition
	  && targetm_common.have_named_sections
	  && JUMP_P (BB_END (jump_block))
	  && !any_condjump_p (BB_END (jump_block))
	  && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
	add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);

      /* Wire the edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect the old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If the asm goto has any label refs to TARGET's label, also add
	 an edge from the asm goto bb to TARGET.  */
      if (asm_goto_edge)
	{
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR)
    {
      if (jump_label == ret_rtx)
	{
#ifdef HAVE_return
	  emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
#ifdef HAVE_simple_return
	  emit_jump_insn_after_setloc (gen_simple_return (),
				       BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  emit_barrier_after (BB_END (jump_block));
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  return new_bb;
}

/* Edge E is assumed to be a fallthru edge.  Emit the needed jump
   instruction (and possibly create a new basic block) to make the edge
   non-fallthru.  Return the newly created BB, or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}

/* Redirect the edge even at the expense of creating a new jump insn or
   basic block.  Return the new basic block if one was created, NULL
   otherwise.  The conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect the newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target, NULL_RTX);
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}

/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}

/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR)
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* If this is a fall through edge to the exit block, the blocks might
     not be adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
      /* ??? Why not edge_in->dest->prev_bb here?  */
      BB_COPY_PARTITION (bb, edge_in->dest);
    }
1700 
1701   make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1702 
1703   /* For non-fallthru edges, we must adjust the predecessor's
1704      jump instruction to target our new block.  */
1705   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1706     {
1707       edge redirected = redirect_edge_and_branch (edge_in, bb);
1708       gcc_assert (redirected);
1709     }
1710   else
1711     {
1712       if (edge_in->src != ENTRY_BLOCK_PTR)
1713 	{
1714 	  /* For asm goto even splitting of fallthru edge might
1715 	     need insn patching, as other labels might point to the
1716 	     old label.  */
1717 	  rtx last = BB_END (edge_in->src);
1718 	  if (last
1719 	      && JUMP_P (last)
1720 	      && edge_in->dest != EXIT_BLOCK_PTR
1721 	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
1722 	      && patch_jump_insn (last, before, bb))
1723 	    df_set_bb_dirty (edge_in->src);
1724 	}
1725       redirect_edge_succ (edge_in, bb);
1726     }
1727 
1728   return bb;
1729 }
1730 
1731 /* Queue instructions for insertion on an edge between two basic blocks.
1732    The new instructions and basic blocks (if any) will not appear in the
1733    CFG until commit_edge_insertions is called.  */
1734 
1735 void
insert_insn_on_edge(rtx pattern,edge e)1736 insert_insn_on_edge (rtx pattern, edge e)
1737 {
1738   /* We cannot insert instructions on an abnormal critical edge.
1739      It will be easier to find the culprit if we die now.  */
1740   gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1741 
1742   if (e->insns.r == NULL_RTX)
1743     start_sequence ();
1744   else
1745     push_to_sequence (e->insns.r);
1746 
1747   emit_insn (pattern);
1748 
1749   e->insns.r = get_insns ();
1750   end_sequence ();
1751 }
1752 
1753 /* Update the CFG for the instructions queued on edge E.  */
1754 
1755 void
commit_one_edge_insertion(edge e)1756 commit_one_edge_insertion (edge e)
1757 {
1758   rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1759   basic_block bb;
1760 
1761   /* Pull the insns off the edge now since the edge might go away.  */
1762   insns = e->insns.r;
1763   e->insns.r = NULL_RTX;
1764 
1765   /* Figure out where to put these insns.  If the destination has
1766      one predecessor, insert there.  Except for the exit block.  */
1767   if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1768     {
1769       bb = e->dest;
1770 
1771       /* Get the location correct wrt a code label, and "nice" wrt
1772 	 a basic block note, and before everything else.  */
1773       tmp = BB_HEAD (bb);
1774       if (LABEL_P (tmp))
1775 	tmp = NEXT_INSN (tmp);
1776       if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1777 	tmp = NEXT_INSN (tmp);
1778       if (tmp == BB_HEAD (bb))
1779 	before = tmp;
1780       else if (tmp)
1781 	after = PREV_INSN (tmp);
1782       else
1783 	after = get_last_insn ();
1784     }
1785 
1786   /* If the source has one successor and the edge is not abnormal,
1787      insert there.  Except for the entry block.
1788      Don't do this if the predecessor ends in a jump other than
1789      unconditional simple jump.  E.g. for asm goto that points all
1790      its labels at the fallthru basic block, we can't insert instructions
1791      before the asm goto, as the asm goto can have various of side effects,
1792      and can't emit instructions after the asm goto, as it must end
1793      the basic block.  */
1794   else if ((e->flags & EDGE_ABNORMAL) == 0
1795 	   && single_succ_p (e->src)
1796 	   && e->src != ENTRY_BLOCK_PTR
1797 	   && (!JUMP_P (BB_END (e->src))
1798 	       || simplejump_p (BB_END (e->src))))
1799     {
1800       bb = e->src;
1801 
1802       /* It is possible to have a non-simple jump here.  Consider a target
1803 	 where some forms of unconditional jumps clobber a register.  This
1804 	 happens on the fr30 for example.
1805 
1806 	 We know this block has a single successor, so we can just emit
1807 	 the queued insns before the jump.  */
1808       if (JUMP_P (BB_END (bb)))
1809 	before = BB_END (bb);
1810       else
1811 	{
1812 	  /* We'd better be fallthru, or we've lost track of what's what.  */
1813 	  gcc_assert (e->flags & EDGE_FALLTHRU);
1814 
1815 	  after = BB_END (bb);
1816 	}
1817     }
1818 
1819   /* Otherwise we must split the edge.  */
1820   else
1821     {
1822       bb = split_edge (e);
1823       after = BB_END (bb);
1824 
1825       if (flag_reorder_blocks_and_partition
1826 	  && targetm_common.have_named_sections
1827 	  && e->src != ENTRY_BLOCK_PTR
1828 	  && BB_PARTITION (e->src) == BB_COLD_PARTITION
1829 	  && !(e->flags & EDGE_CROSSING)
1830 	  && JUMP_P (after)
1831 	  && !any_condjump_p (after)
1832 	  && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1833 	add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1834     }
1835 
1836   /* Now that we've found the spot, do the insertion.  */
1837   if (before)
1838     {
1839       emit_insn_before_noloc (insns, before, bb);
1840       last = prev_nonnote_insn (before);
1841     }
1842   else
1843     last = emit_insn_after_noloc (insns, after, bb);
1844 
1845   if (returnjump_p (last))
1846     {
1847       /* ??? Remove all outgoing edges from BB and add one for EXIT.
1848 	 This is not currently a problem because this only happens
1849 	 for the (single) epilogue, which already has a fallthru edge
1850 	 to EXIT.  */
1851 
1852       e = single_succ_edge (bb);
1853       gcc_assert (e->dest == EXIT_BLOCK_PTR
1854 		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1855 
1856       e->flags &= ~EDGE_FALLTHRU;
1857       emit_barrier_after (last);
1858 
1859       if (before)
1860 	delete_insn (before);
1861     }
1862   else
1863     gcc_assert (!JUMP_P (last));
1864 }
1865 
1866 /* Update the CFG for all queued instructions.  */
1867 
1868 void
commit_edge_insertions(void)1869 commit_edge_insertions (void)
1870 {
1871   basic_block bb;
1872 
1873 #ifdef ENABLE_CHECKING
1874   verify_flow_info ();
1875 #endif
1876 
1877   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1878     {
1879       edge e;
1880       edge_iterator ei;
1881 
1882       FOR_EACH_EDGE (e, ei, bb->succs)
1883 	if (e->insns.r)
1884 	  commit_one_edge_insertion (e);
1885     }
1886 }
1887 
1888 
1889 /* Print out RTL-specific basic block information (live information
1890    at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
1891    documented in dumpfile.h.  */
1892 
1893 static void
rtl_dump_bb(FILE * outf,basic_block bb,int indent,int flags)1894 rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
1895 {
1896   rtx insn;
1897   rtx last;
1898   char *s_indent;
1899 
1900   s_indent = (char *) alloca ((size_t) indent + 1);
1901   memset (s_indent, ' ', (size_t) indent);
1902   s_indent[indent] = '\0';
1903 
1904   if (df && (flags & TDF_DETAILS))
1905     {
1906       df_dump_top (bb, outf);
1907       putc ('\n', outf);
1908     }
1909 
1910   if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
1911     for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1912 	 insn = NEXT_INSN (insn))
1913       {
1914 	if (flags & TDF_DETAILS)
1915 	  df_dump_insn_top (insn, outf);
1916 	if (! (flags & TDF_SLIM))
1917 	  print_rtl_single (outf, insn);
1918 	else
1919 	  dump_insn_slim (outf, insn);
1920 	if (flags & TDF_DETAILS)
1921 	  df_dump_insn_bottom (insn, outf);
1922       }
1923 
1924   if (df && (flags & TDF_DETAILS))
1925     {
1926       df_dump_bottom (bb, outf);
1927       putc ('\n', outf);
1928     }
1929 
1930 }
1931 
1932 /* Like dump_function_to_file, but for RTL.  Print out dataflow information
1933    for the start of each basic block.  FLAGS are the TDF_* masks documented
1934    in dumpfile.h.  */
1935 
1936 void
print_rtl_with_bb(FILE * outf,const_rtx rtx_first,int flags)1937 print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
1938 {
1939   const_rtx tmp_rtx;
1940   if (rtx_first == 0)
1941     fprintf (outf, "(nil)\n");
1942   else
1943     {
1944       enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1945       int max_uid = get_max_uid ();
1946       basic_block *start = XCNEWVEC (basic_block, max_uid);
1947       basic_block *end = XCNEWVEC (basic_block, max_uid);
1948       enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1949       basic_block bb;
1950 
1951       /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
1952 	 insns, but the CFG is not maintained so the basic block info
1953 	 is not reliable.  Therefore it's omitted from the dumps.  */
1954       if (! (cfun->curr_properties & PROP_cfg))
1955         flags &= ~TDF_BLOCKS;
1956 
1957       if (df)
1958 	df_dump_start (outf);
1959 
1960       if (flags & TDF_BLOCKS)
1961 	{
1962 	  FOR_EACH_BB_REVERSE (bb)
1963 	    {
1964 	      rtx x;
1965 
1966 	      start[INSN_UID (BB_HEAD (bb))] = bb;
1967 	      end[INSN_UID (BB_END (bb))] = bb;
1968 	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1969 		{
1970 		  enum bb_state state = IN_MULTIPLE_BB;
1971 
1972 		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1973 		    state = IN_ONE_BB;
1974 		  in_bb_p[INSN_UID (x)] = state;
1975 
1976 		  if (x == BB_END (bb))
1977 		    break;
1978 		}
1979 	    }
1980 	}
1981 
1982       for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1983 	{
1984 	  if (flags & TDF_BLOCKS)
1985 	    {
1986 	      bb = start[INSN_UID (tmp_rtx)];
1987 	      if (bb != NULL)
1988 		{
1989 		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
1990 		  if (df && (flags & TDF_DETAILS))
1991 		    df_dump_top (bb, outf);
1992 		}
1993 
1994 	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1995 		  && !NOTE_P (tmp_rtx)
1996 		  && !BARRIER_P (tmp_rtx))
1997 		fprintf (outf, ";; Insn is not within a basic block\n");
1998 	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1999 		fprintf (outf, ";; Insn is in multiple basic blocks\n");
2000 	    }
2001 
2002 	  if (flags & TDF_DETAILS)
2003 	    df_dump_insn_top (tmp_rtx, outf);
2004 	  if (! (flags & TDF_SLIM))
2005 	    print_rtl_single (outf, tmp_rtx);
2006 	  else
2007 	    dump_insn_slim (outf, tmp_rtx);
2008 	  if (flags & TDF_DETAILS)
2009 	    df_dump_insn_bottom (tmp_rtx, outf);
2010 
2011 	  if (flags & TDF_BLOCKS)
2012 	    {
2013 	      bb = end[INSN_UID (tmp_rtx)];
2014 	      if (bb != NULL)
2015 		{
2016 		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
2017 		  if (df && (flags & TDF_DETAILS))
2018 		    df_dump_bottom (bb, outf);
2019 		  putc ('\n', outf);
2020 		}
2021 	    }
2022 	}
2023 
2024       free (start);
2025       free (end);
2026       free (in_bb_p);
2027     }
2028 }
2029 
2030 /* Update the branch probability of BB if a REG_BR_PROB is present.  */
2031 
2032 void
update_br_prob_note(basic_block bb)2033 update_br_prob_note (basic_block bb)
2034 {
2035   rtx note;
2036   if (!JUMP_P (BB_END (bb)))
2037     return;
2038   note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2039   if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
2040     return;
2041   XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
2042 }
2043 
2044 /* Get the last insn associated with block BB (that includes barriers and
2045    tablejumps after BB).  */
2046 rtx
get_last_bb_insn(basic_block bb)2047 get_last_bb_insn (basic_block bb)
2048 {
2049   rtx tmp;
2050   rtx end = BB_END (bb);
2051 
2052   /* Include any jump table following the basic block.  */
2053   if (tablejump_p (end, NULL, &tmp))
2054     end = tmp;
2055 
2056   /* Include any barriers that may follow the basic block.  */
2057   tmp = next_nonnote_insn_bb (end);
2058   while (tmp && BARRIER_P (tmp))
2059     {
2060       end = tmp;
2061       tmp = next_nonnote_insn_bb (end);
2062     }
2063 
2064   return end;
2065 }
2066 
2067 /* Verify the CFG and RTL consistency common for both underlying RTL and
2068    cfglayout RTL.
2069 
2070    Currently it does following checks:
2071 
2072    - overlapping of basic blocks
2073    - insns with wrong BLOCK_FOR_INSN pointers
2074    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2075    - tails of basic blocks (ensure that boundary is necessary)
2076    - scans body of the basic block for JUMP_INSN, CODE_LABEL
2077      and NOTE_INSN_BASIC_BLOCK
2078    - verify that no fall_thru edge crosses hot/cold partition boundaries
2079    - verify that there are no pending RTL branch predictions
2080 
2081    In future it can be extended check a lot of other stuff as well
2082    (reachability of basic blocks, life information, etc. etc.).  */
2083 
2084 static int
rtl_verify_flow_info_1(void)2085 rtl_verify_flow_info_1 (void)
2086 {
2087   rtx x;
2088   int err = 0;
2089   basic_block bb;
2090 
2091   /* Check the general integrity of the basic blocks.  */
2092   FOR_EACH_BB_REVERSE (bb)
2093     {
2094       rtx insn;
2095 
2096       if (!(bb->flags & BB_RTL))
2097 	{
2098 	  error ("BB_RTL flag not set for block %d", bb->index);
2099 	  err = 1;
2100 	}
2101 
2102       FOR_BB_INSNS (bb, insn)
2103 	if (BLOCK_FOR_INSN (insn) != bb)
2104 	  {
2105 	    error ("insn %d basic block pointer is %d, should be %d",
2106 		   INSN_UID (insn),
2107 		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2108 		   bb->index);
2109 	    err = 1;
2110 	  }
2111 
2112       for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2113 	if (!BARRIER_P (insn)
2114 	    && BLOCK_FOR_INSN (insn) != NULL)
2115 	  {
2116 	    error ("insn %d in header of bb %d has non-NULL basic block",
2117 		   INSN_UID (insn), bb->index);
2118 	    err = 1;
2119 	  }
2120       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2121 	if (!BARRIER_P (insn)
2122 	    && BLOCK_FOR_INSN (insn) != NULL)
2123 	  {
2124 	    error ("insn %d in footer of bb %d has non-NULL basic block",
2125 		   INSN_UID (insn), bb->index);
2126 	    err = 1;
2127 	  }
2128     }
2129 
2130   /* Now check the basic blocks (boundaries etc.) */
2131   FOR_EACH_BB_REVERSE (bb)
2132     {
2133       int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2134       int n_eh = 0, n_abnormal = 0;
2135       edge e, fallthru = NULL;
2136       rtx note;
2137       edge_iterator ei;
2138 
2139       if (JUMP_P (BB_END (bb))
2140 	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2141 	  && EDGE_COUNT (bb->succs) >= 2
2142 	  && any_condjump_p (BB_END (bb)))
2143 	{
2144 	  if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
2145 	      && profile_status != PROFILE_ABSENT)
2146 	    {
2147 	      error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
2148 		     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
2149 	      err = 1;
2150 	    }
2151 	}
2152       FOR_EACH_EDGE (e, ei, bb->succs)
2153 	{
2154 	  bool is_crossing;
2155 
2156 	  if (e->flags & EDGE_FALLTHRU)
2157 	    n_fallthru++, fallthru = e;
2158 
2159 	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2160 			 && e->src != ENTRY_BLOCK_PTR
2161 			 && e->dest != EXIT_BLOCK_PTR);
2162 	  if (e->flags & EDGE_CROSSING)
2163 	    {
2164 	      if (!is_crossing)
2165 		{
2166 		  error ("EDGE_CROSSING incorrectly set across same section");
2167 		  err = 1;
2168 		}
2169 	      if (e->flags & EDGE_FALLTHRU)
2170 		{
2171 		  error ("fallthru edge crosses section boundary in bb %i",
2172 			 e->src->index);
2173 		  err = 1;
2174 		}
2175 	      if (e->flags & EDGE_EH)
2176 		{
2177 		  error ("EH edge crosses section boundary in bb %i",
2178 			 e->src->index);
2179 		  err = 1;
2180 		}
2181 	    }
2182 	  else if (is_crossing)
2183 	    {
2184 	      error ("EDGE_CROSSING missing across section boundary");
2185 	      err = 1;
2186 	    }
2187 
2188 	  if ((e->flags & ~(EDGE_DFS_BACK
2189 			    | EDGE_CAN_FALLTHRU
2190 			    | EDGE_IRREDUCIBLE_LOOP
2191 			    | EDGE_LOOP_EXIT
2192 			    | EDGE_CROSSING
2193 			    | EDGE_PRESERVE)) == 0)
2194 	    n_branch++;
2195 
2196 	  if (e->flags & EDGE_ABNORMAL_CALL)
2197 	    n_abnormal_call++;
2198 
2199 	  if (e->flags & EDGE_SIBCALL)
2200 	    n_sibcall++;
2201 
2202 	  if (e->flags & EDGE_EH)
2203 	    n_eh++;
2204 
2205 	  if (e->flags & EDGE_ABNORMAL)
2206 	    n_abnormal++;
2207 	}
2208 
2209       if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2210 	{
2211 	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2212 	  err = 1;
2213 	}
2214       if (n_eh > 1)
2215 	{
2216 	  error ("too many exception handling edges in bb %i", bb->index);
2217 	  err = 1;
2218 	}
2219       if (n_branch
2220 	  && (!JUMP_P (BB_END (bb))
2221 	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2222 				   || any_condjump_p (BB_END (bb))))))
2223 	{
2224 	  error ("too many outgoing branch edges from bb %i", bb->index);
2225 	  err = 1;
2226 	}
2227       if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2228 	{
2229 	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
2230 	  err = 1;
2231 	}
2232       if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2233 	{
2234 	  error ("wrong number of branch edges after unconditional jump"
2235 		 " in bb %i", bb->index);
2236 	  err = 1;
2237 	}
2238       if (n_branch != 1 && any_condjump_p (BB_END (bb))
2239 	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2240 	{
2241 	  error ("wrong amount of branch edges after conditional jump"
2242 		 " in bb %i", bb->index);
2243 	  err = 1;
2244 	}
2245       if (n_abnormal_call && !CALL_P (BB_END (bb)))
2246 	{
2247 	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
2248 	  err = 1;
2249 	}
2250       if (n_sibcall && !CALL_P (BB_END (bb)))
2251 	{
2252 	  error ("sibcall edges for non-call insn in bb %i", bb->index);
2253 	  err = 1;
2254 	}
2255       if (n_abnormal > n_eh
2256 	  && !(CALL_P (BB_END (bb))
2257 	       && n_abnormal == n_abnormal_call + n_sibcall)
2258 	  && (!JUMP_P (BB_END (bb))
2259 	      || any_condjump_p (BB_END (bb))
2260 	      || any_uncondjump_p (BB_END (bb))))
2261 	{
2262 	  error ("abnormal edges for no purpose in bb %i", bb->index);
2263 	  err = 1;
2264 	}
2265 
2266       for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
2267 	/* We may have a barrier inside a basic block before dead code
2268 	   elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
2269 	if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
2270 	  {
2271 	    debug_rtx (x);
2272 	    if (! BLOCK_FOR_INSN (x))
2273 	      error
2274 		("insn %d inside basic block %d but block_for_insn is NULL",
2275 		 INSN_UID (x), bb->index);
2276 	    else
2277 	      error
2278 		("insn %d inside basic block %d but block_for_insn is %i",
2279 		 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2280 
2281 	    err = 1;
2282 	  }
2283 
2284       /* OK pointers are correct.  Now check the header of basic
2285 	 block.  It ought to contain optional CODE_LABEL followed
2286 	 by NOTE_BASIC_BLOCK.  */
2287       x = BB_HEAD (bb);
2288       if (LABEL_P (x))
2289 	{
2290 	  if (BB_END (bb) == x)
2291 	    {
2292 	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2293 		     bb->index);
2294 	      err = 1;
2295 	    }
2296 
2297 	  x = NEXT_INSN (x);
2298 	}
2299 
2300       if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2301 	{
2302 	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2303 		 bb->index);
2304 	  err = 1;
2305 	}
2306 
2307       if (BB_END (bb) == x)
2308 	/* Do checks for empty blocks here.  */
2309 	;
2310       else
2311 	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2312 	  {
2313 	    if (NOTE_INSN_BASIC_BLOCK_P (x))
2314 	      {
2315 		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2316 		       INSN_UID (x), bb->index);
2317 		err = 1;
2318 	      }
2319 
2320 	    if (x == BB_END (bb))
2321 	      break;
2322 
2323 	    if (control_flow_insn_p (x))
2324 	      {
2325 		error ("in basic block %d:", bb->index);
2326 		fatal_insn ("flow control insn inside a basic block", x);
2327 	      }
2328 	  }
2329     }
2330 
2331   /* Clean up.  */
2332   return err;
2333 }
2334 
2335 /* Verify the CFG and RTL consistency common for both underlying RTL and
2336    cfglayout RTL.
2337 
2338    Currently it does following checks:
2339    - all checks of rtl_verify_flow_info_1
2340    - test head/end pointers
2341    - check that all insns are in the basic blocks
2342      (except the switch handling code, barriers and notes)
2343    - check that all returns are followed by barriers
2344    - check that all fallthru edge points to the adjacent blocks.  */
2345 
2346 static int
rtl_verify_flow_info(void)2347 rtl_verify_flow_info (void)
2348 {
2349   basic_block bb;
2350   int err = rtl_verify_flow_info_1 ();
2351   rtx x;
2352   rtx last_head = get_last_insn ();
2353   basic_block *bb_info;
2354   int num_bb_notes;
2355   const rtx rtx_first = get_insns ();
2356   basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
2357   const int max_uid = get_max_uid ();
2358 
2359   bb_info = XCNEWVEC (basic_block, max_uid);
2360 
2361   FOR_EACH_BB_REVERSE (bb)
2362     {
2363       edge e;
2364       rtx head = BB_HEAD (bb);
2365       rtx end = BB_END (bb);
2366 
2367       for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2368 	{
2369 	  /* Verify the end of the basic block is in the INSN chain.  */
2370 	  if (x == end)
2371 	    break;
2372 
2373 	  /* And that the code outside of basic blocks has NULL bb field.  */
2374 	if (!BARRIER_P (x)
2375 	    && BLOCK_FOR_INSN (x) != NULL)
2376 	  {
2377 	    error ("insn %d outside of basic blocks has non-NULL bb field",
2378 		   INSN_UID (x));
2379 	    err = 1;
2380 	  }
2381 	}
2382 
2383       if (!x)
2384 	{
2385 	  error ("end insn %d for block %d not found in the insn stream",
2386 		 INSN_UID (end), bb->index);
2387 	  err = 1;
2388 	}
2389 
2390       /* Work backwards from the end to the head of the basic block
2391 	 to verify the head is in the RTL chain.  */
2392       for (; x != NULL_RTX; x = PREV_INSN (x))
2393 	{
2394 	  /* While walking over the insn chain, verify insns appear
2395 	     in only one basic block.  */
2396 	  if (bb_info[INSN_UID (x)] != NULL)
2397 	    {
2398 	      error ("insn %d is in multiple basic blocks (%d and %d)",
2399 		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2400 	      err = 1;
2401 	    }
2402 
2403 	  bb_info[INSN_UID (x)] = bb;
2404 
2405 	  if (x == head)
2406 	    break;
2407 	}
2408       if (!x)
2409 	{
2410 	  error ("head insn %d for block %d not found in the insn stream",
2411 		 INSN_UID (head), bb->index);
2412 	  err = 1;
2413 	}
2414 
2415       last_head = PREV_INSN (x);
2416 
2417       e = find_fallthru_edge (bb->succs);
2418       if (!e)
2419 	{
2420 	  rtx insn;
2421 
2422 	  /* Ensure existence of barrier in BB with no fallthru edges.  */
2423 	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2424 	    {
2425 	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2426 		{
2427 		  error ("missing barrier after block %i", bb->index);
2428 		  err = 1;
2429 		  break;
2430 		}
2431 	      if (BARRIER_P (insn))
2432 		break;
2433 	    }
2434 	}
2435       else if (e->src != ENTRY_BLOCK_PTR
2436 	       && e->dest != EXIT_BLOCK_PTR)
2437 	{
2438 	  rtx insn;
2439 
2440 	  if (e->src->next_bb != e->dest)
2441 	    {
2442 	      error
2443 		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2444 		 e->src->index, e->dest->index);
2445 	      err = 1;
2446 	    }
2447 	  else
2448 	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2449 		 insn = NEXT_INSN (insn))
2450 	      if (BARRIER_P (insn) || INSN_P (insn))
2451 		{
2452 		  error ("verify_flow_info: Incorrect fallthru %i->%i",
2453 			 e->src->index, e->dest->index);
2454 		  fatal_insn ("wrong insn in the fallthru edge", insn);
2455 		  err = 1;
2456 		}
2457 	}
2458     }
2459 
2460   for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2461     {
2462       /* Check that the code before the first basic block has NULL
2463 	 bb field.  */
2464       if (!BARRIER_P (x)
2465 	  && BLOCK_FOR_INSN (x) != NULL)
2466 	{
2467 	  error ("insn %d outside of basic blocks has non-NULL bb field",
2468 		 INSN_UID (x));
2469 	  err = 1;
2470 	}
2471     }
2472   free (bb_info);
2473 
2474   num_bb_notes = 0;
2475   last_bb_seen = ENTRY_BLOCK_PTR;
2476 
2477   for (x = rtx_first; x; x = NEXT_INSN (x))
2478     {
2479       if (NOTE_INSN_BASIC_BLOCK_P (x))
2480 	{
2481 	  bb = NOTE_BASIC_BLOCK (x);
2482 
2483 	  num_bb_notes++;
2484 	  if (bb != last_bb_seen->next_bb)
2485 	    internal_error ("basic blocks not laid down consecutively");
2486 
2487 	  curr_bb = last_bb_seen = bb;
2488 	}
2489 
2490       if (!curr_bb)
2491 	{
2492 	  switch (GET_CODE (x))
2493 	    {
2494 	    case BARRIER:
2495 	    case NOTE:
2496 	      break;
2497 
2498 	    case CODE_LABEL:
2499 	      /* An addr_vec is placed outside any basic block.  */
2500 	      if (NEXT_INSN (x)
2501 		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2502 		x = NEXT_INSN (x);
2503 
2504 	      /* But in any case, non-deletable labels can appear anywhere.  */
2505 	      break;
2506 
2507 	    default:
2508 	      fatal_insn ("insn outside basic block", x);
2509 	    }
2510 	}
2511 
2512       if (JUMP_P (x)
2513 	  && returnjump_p (x) && ! condjump_p (x)
2514 	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2515 	    fatal_insn ("return not followed by barrier", x);
2516       if (curr_bb && x == BB_END (curr_bb))
2517 	curr_bb = NULL;
2518     }
2519 
2520   if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2521     internal_error
2522       ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2523        num_bb_notes, n_basic_blocks);
2524 
2525    return err;
2526 }
2527 
2528 /* Assume that the preceding pass has possibly eliminated jump instructions
2529    or converted the unconditional jumps.  Eliminate the edges from CFG.
2530    Return true if any edges are eliminated.  */
2531 
2532 bool
purge_dead_edges(basic_block bb)2533 purge_dead_edges (basic_block bb)
2534 {
2535   edge e;
2536   rtx insn = BB_END (bb), note;
2537   bool purged = false;
2538   bool found;
2539   edge_iterator ei;
2540 
2541   if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2542     do
2543       insn = PREV_INSN (insn);
2544     while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2545 
2546   /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
2547   if (NONJUMP_INSN_P (insn)
2548       && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2549     {
2550       rtx eqnote;
2551 
2552       if (! may_trap_p (PATTERN (insn))
2553 	  || ((eqnote = find_reg_equal_equiv_note (insn))
2554 	      && ! may_trap_p (XEXP (eqnote, 0))))
2555 	remove_note (insn, note);
2556     }
2557 
2558   /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
2559   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2560     {
2561       bool remove = false;
2562 
2563       /* There are three types of edges we need to handle correctly here: EH
2564 	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
2565 	 latter can appear when nonlocal gotos are used.  */
2566       if (e->flags & EDGE_ABNORMAL_CALL)
2567 	{
2568 	  if (!CALL_P (insn))
2569 	    remove = true;
2570 	  else if (can_nonlocal_goto (insn))
2571 	    ;
2572 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2573 	    ;
2574 	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
2575 	    ;
2576 	  else
2577 	    remove = true;
2578 	}
2579       else if (e->flags & EDGE_EH)
2580 	remove = !can_throw_internal (insn);
2581 
2582       if (remove)
2583 	{
2584 	  remove_edge (e);
2585 	  df_set_bb_dirty (bb);
2586 	  purged = true;
2587 	}
2588       else
2589 	ei_next (&ei);
2590     }
2591 
2592   if (JUMP_P (insn))
2593     {
2594       rtx note;
2595       edge b,f;
2596       edge_iterator ei;
2597 
2598       /* We do care only about conditional jumps and simplejumps.  */
2599       if (!any_condjump_p (insn)
2600 	  && !returnjump_p (insn)
2601 	  && !simplejump_p (insn))
2602 	return purged;
2603 
2604       /* Branch probability/prediction notes are defined only for
2605 	 condjumps.  We've possibly turned condjump into simplejump.  */
2606       if (simplejump_p (insn))
2607 	{
2608 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
2609 	  if (note)
2610 	    remove_note (insn, note);
2611 	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2612 	    remove_note (insn, note);
2613 	}
2614 
2615       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2616 	{
2617 	  /* Avoid abnormal flags to leak from computed jumps turned
2618 	     into simplejumps.  */
2619 
2620 	  e->flags &= ~EDGE_ABNORMAL;
2621 
2622 	  /* See if this edge is one we should keep.  */
2623 	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2624 	    /* A conditional jump can fall through into the next
2625 	       block, so we should keep the edge.  */
2626 	    {
2627 	      ei_next (&ei);
2628 	      continue;
2629 	    }
2630 	  else if (e->dest != EXIT_BLOCK_PTR
2631 		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2632 	    /* If the destination block is the target of the jump,
2633 	       keep the edge.  */
2634 	    {
2635 	      ei_next (&ei);
2636 	      continue;
2637 	    }
2638 	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2639 	    /* If the destination block is the exit block, and this
2640 	       instruction is a return, then keep the edge.  */
2641 	    {
2642 	      ei_next (&ei);
2643 	      continue;
2644 	    }
2645 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2646 	    /* Keep the edges that correspond to exceptions thrown by
2647 	       this instruction and rematerialize the EDGE_ABNORMAL
2648 	       flag we just cleared above.  */
2649 	    {
2650 	      e->flags |= EDGE_ABNORMAL;
2651 	      ei_next (&ei);
2652 	      continue;
2653 	    }
2654 
2655 	  /* We do not need this edge.  */
2656 	  df_set_bb_dirty (bb);
2657 	  purged = true;
2658 	  remove_edge (e);
2659 	}
2660 
2661       if (EDGE_COUNT (bb->succs) == 0 || !purged)
2662 	return purged;
2663 
2664       if (dump_file)
2665 	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2666 
2667       if (!optimize)
2668 	return purged;
2669 
2670       /* Redistribute probabilities.  */
2671       if (single_succ_p (bb))
2672 	{
2673 	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2674 	  single_succ_edge (bb)->count = bb->count;
2675 	}
2676       else
2677 	{
2678 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
2679 	  if (!note)
2680 	    return purged;
2681 
2682 	  b = BRANCH_EDGE (bb);
2683 	  f = FALLTHRU_EDGE (bb);
2684 	  b->probability = INTVAL (XEXP (note, 0));
2685 	  f->probability = REG_BR_PROB_BASE - b->probability;
2686 	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2687 	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2688 	}
2689 
2690       return purged;
2691     }
2692   else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2693     {
2694       /* First, there should not be any EH or ABCALL edges resulting
2695 	 from non-local gotos and the like.  If there were, we shouldn't
2696 	 have created the sibcall in the first place.  Second, there
2697 	 should of course never have been a fallthru edge.  */
2698       gcc_assert (single_succ_p (bb));
2699       gcc_assert (single_succ_edge (bb)->flags
2700 		  == (EDGE_SIBCALL | EDGE_ABNORMAL));
2701 
2702       return 0;
2703     }
2704 
2705   /* If we don't see a jump insn, we don't know exactly why the block would
2706      have been broken at this point.  Look for a simple, non-fallthru edge,
2707      as these are only created by conditional branches.  If we find such an
2708      edge we know that there used to be a jump here and can then safely
2709      remove all non-fallthru edges.  */
2710   found = false;
2711   FOR_EACH_EDGE (e, ei, bb->succs)
2712     if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2713       {
2714 	found = true;
2715 	break;
2716       }
2717 
2718   if (!found)
2719     return purged;
2720 
2721   /* Remove all but the fake and fallthru edges.  The fake edge may be
2722      the only successor for this block in the case of noreturn
2723      calls.  */
2724   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2725     {
2726       if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2727 	{
2728 	  df_set_bb_dirty (bb);
2729 	  remove_edge (e);
2730 	  purged = true;
2731 	}
2732       else
2733 	ei_next (&ei);
2734     }
2735 
2736   gcc_assert (single_succ_p (bb));
2737 
2738   single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2739   single_succ_edge (bb)->count = bb->count;
2740 
2741   if (dump_file)
2742     fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2743 	     bb->index);
2744   return purged;
2745 }
2746 
2747 /* Search all basic blocks for potentially dead edges and purge them.  Return
2748    true if some edge has been eliminated.  */
2749 
2750 bool
purge_all_dead_edges(void)2751 purge_all_dead_edges (void)
2752 {
2753   int purged = false;
2754   basic_block bb;
2755 
2756   FOR_EACH_BB (bb)
2757     {
2758       bool purged_here = purge_dead_edges (bb);
2759 
2760       purged |= purged_here;
2761     }
2762 
2763   return purged;
2764 }
2765 
2766 /* This is used by a few passes that emit some instructions after abnormal
2767    calls, moving the basic block's end, while they in fact do want to emit
2768    them on the fallthru edge.  Look for abnormal call edges, find backward
2769    the call in the block and insert the instructions on the edge instead.
2770 
2771    Similarly, handle instructions throwing exceptions internally.
2772 
2773    Return true when instructions have been found and inserted on edges.  */
2774 
2775 bool
fixup_abnormal_edges(void)2776 fixup_abnormal_edges (void)
2777 {
2778   bool inserted = false;
2779   basic_block bb;
2780 
2781   FOR_EACH_BB (bb)
2782     {
2783       edge e;
2784       edge_iterator ei;
2785 
2786       /* Look for cases we are interested in - calls or instructions causing
2787          exceptions.  */
2788       FOR_EACH_EDGE (e, ei, bb->succs)
2789 	if ((e->flags & EDGE_ABNORMAL_CALL)
2790 	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
2791 		== (EDGE_ABNORMAL | EDGE_EH)))
2792 	  break;
2793 
2794       if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
2795 	{
2796 	  rtx insn;
2797 
2798 	  /* Get past the new insns generated.  Allow notes, as the insns
2799 	     may be already deleted.  */
2800 	  insn = BB_END (bb);
2801 	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
2802 		 && !can_throw_internal (insn)
2803 		 && insn != BB_HEAD (bb))
2804 	    insn = PREV_INSN (insn);
2805 
2806 	  if (CALL_P (insn) || can_throw_internal (insn))
2807 	    {
2808 	      rtx stop, next;
2809 
2810 	      e = find_fallthru_edge (bb->succs);
2811 
2812 	      stop = NEXT_INSN (BB_END (bb));
2813 	      BB_END (bb) = insn;
2814 
2815 	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
2816 		{
2817 		  next = NEXT_INSN (insn);
2818 		  if (INSN_P (insn))
2819 		    {
2820 		      delete_insn (insn);
2821 
2822 		      /* Sometimes there's still the return value USE.
2823 			 If it's placed after a trapping call (i.e. that
2824 			 call is the last insn anyway), we have no fallthru
2825 			 edge.  Simply delete this use and don't try to insert
2826 			 on the non-existent edge.  */
2827 		      if (GET_CODE (PATTERN (insn)) != USE)
2828 			{
2829 			  /* We're not deleting it, we're moving it.  */
2830 			  INSN_DELETED_P (insn) = 0;
2831 			  PREV_INSN (insn) = NULL_RTX;
2832 			  NEXT_INSN (insn) = NULL_RTX;
2833 
2834 			  insert_insn_on_edge (insn, e);
2835 			  inserted = true;
2836 			}
2837 		    }
2838 		  else if (!BARRIER_P (insn))
2839 		    set_block_for_insn (insn, NULL);
2840 		}
2841 	    }
2842 
2843 	  /* It may be that we don't find any trapping insn.  In this
2844 	     case we discovered quite late that the insn that had been
2845 	     marked as can_throw_internal in fact couldn't trap at all.
2846 	     So we should in fact delete the EH edges out of the block.  */
2847 	  else
2848 	    purge_dead_edges (bb);
2849 	}
2850     }
2851 
2852   return inserted;
2853 }
2854 
2855 /* Cut the insns from FIRST to LAST out of the insns stream.  */
2856 
2857 rtx
unlink_insn_chain(rtx first,rtx last)2858 unlink_insn_chain (rtx first, rtx last)
2859 {
2860   rtx prevfirst = PREV_INSN (first);
2861   rtx nextlast = NEXT_INSN (last);
2862 
2863   PREV_INSN (first) = NULL;
2864   NEXT_INSN (last) = NULL;
2865   if (prevfirst)
2866     NEXT_INSN (prevfirst) = nextlast;
2867   if (nextlast)
2868     PREV_INSN (nextlast) = prevfirst;
2869   else
2870     set_last_insn (prevfirst);
2871   if (!prevfirst)
2872     set_first_insn (nextlast);
2873   return first;
2874 }
2875 
2876 /* Skip over inter-block insns occurring after BB which are typically
2877    associated with BB (e.g., barriers). If there are any such insns,
2878    we return the last one. Otherwise, we return the end of BB.  */
2879 
2880 static rtx
skip_insns_after_block(basic_block bb)2881 skip_insns_after_block (basic_block bb)
2882 {
2883   rtx insn, last_insn, next_head, prev;
2884 
2885   next_head = NULL_RTX;
2886   if (bb->next_bb != EXIT_BLOCK_PTR)
2887     next_head = BB_HEAD (bb->next_bb);
2888 
2889   for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
2890     {
2891       if (insn == next_head)
2892 	break;
2893 
2894       switch (GET_CODE (insn))
2895 	{
2896 	case BARRIER:
2897 	  last_insn = insn;
2898 	  continue;
2899 
2900 	case NOTE:
2901 	  switch (NOTE_KIND (insn))
2902 	    {
2903 	    case NOTE_INSN_BLOCK_END:
2904 	      gcc_unreachable ();
2905 	      continue;
2906 	    default:
2907 	      continue;
2908 	      break;
2909 	    }
2910 	  break;
2911 
2912 	case CODE_LABEL:
2913 	  if (NEXT_INSN (insn)
2914 	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
2915 	    {
2916 	      insn = NEXT_INSN (insn);
2917 	      last_insn = insn;
2918 	      continue;
2919 	    }
2920 	  break;
2921 
2922 	default:
2923 	  break;
2924 	}
2925 
2926       break;
2927     }
2928 
2929   /* It is possible to hit contradictory sequence.  For instance:
2930 
2931      jump_insn
2932      NOTE_INSN_BLOCK_BEG
2933      barrier
2934 
2935      Where barrier belongs to jump_insn, but the note does not.  This can be
2936      created by removing the basic block originally following
2937      NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */
2938 
2939   for (insn = last_insn; insn != BB_END (bb); insn = prev)
2940     {
2941       prev = PREV_INSN (insn);
2942       if (NOTE_P (insn))
2943 	switch (NOTE_KIND (insn))
2944 	  {
2945 	  case NOTE_INSN_BLOCK_END:
2946 	    gcc_unreachable ();
2947 	    break;
2948 	  case NOTE_INSN_DELETED:
2949 	  case NOTE_INSN_DELETED_LABEL:
2950 	  case NOTE_INSN_DELETED_DEBUG_LABEL:
2951 	    continue;
2952 	  default:
2953 	    reorder_insns (insn, insn, last_insn);
2954 	  }
2955     }
2956 
2957   return last_insn;
2958 }
2959 
2960 /* Locate or create a label for a given basic block.  */
2961 
2962 static rtx
label_for_bb(basic_block bb)2963 label_for_bb (basic_block bb)
2964 {
2965   rtx label = BB_HEAD (bb);
2966 
2967   if (!LABEL_P (label))
2968     {
2969       if (dump_file)
2970 	fprintf (dump_file, "Emitting label for block %d\n", bb->index);
2971 
2972       label = block_label (bb);
2973     }
2974 
2975   return label;
2976 }
2977 
2978 /* Locate the effective beginning and end of the insn chain for each
2979    block, as defined by skip_insns_after_block above.  */
2980 
2981 static void
record_effective_endpoints(void)2982 record_effective_endpoints (void)
2983 {
2984   rtx next_insn;
2985   basic_block bb;
2986   rtx insn;
2987 
2988   for (insn = get_insns ();
2989        insn
2990        && NOTE_P (insn)
2991        && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
2992        insn = NEXT_INSN (insn))
2993     continue;
2994   /* No basic blocks at all?  */
2995   gcc_assert (insn);
2996 
2997   if (PREV_INSN (insn))
2998     cfg_layout_function_header =
2999 	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
3000   else
3001     cfg_layout_function_header = NULL_RTX;
3002 
3003   next_insn = get_insns ();
3004   FOR_EACH_BB (bb)
3005     {
3006       rtx end;
3007 
3008       if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3009 	BB_HEADER (bb) = unlink_insn_chain (next_insn,
3010 					      PREV_INSN (BB_HEAD (bb)));
3011       end = skip_insns_after_block (bb);
3012       if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3013 	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3014       next_insn = NEXT_INSN (BB_END (bb));
3015     }
3016 
3017   cfg_layout_function_footer = next_insn;
3018   if (cfg_layout_function_footer)
3019     cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3020 }
3021 
3022 static unsigned int
into_cfg_layout_mode(void)3023 into_cfg_layout_mode (void)
3024 {
3025   cfg_layout_initialize (0);
3026   return 0;
3027 }
3028 
3029 static unsigned int
outof_cfg_layout_mode(void)3030 outof_cfg_layout_mode (void)
3031 {
3032   basic_block bb;
3033 
3034   FOR_EACH_BB (bb)
3035     if (bb->next_bb != EXIT_BLOCK_PTR)
3036       bb->aux = bb->next_bb;
3037 
3038   cfg_layout_finalize ();
3039 
3040   return 0;
3041 }
3042 
3043 struct rtl_opt_pass pass_into_cfg_layout_mode =
3044 {
3045  {
3046   RTL_PASS,
3047   "into_cfglayout",                     /* name */
3048   OPTGROUP_NONE,                        /* optinfo_flags */
3049   NULL,                                 /* gate */
3050   into_cfg_layout_mode,                 /* execute */
3051   NULL,                                 /* sub */
3052   NULL,                                 /* next */
3053   0,                                    /* static_pass_number */
3054   TV_CFG,                               /* tv_id */
3055   0,                                    /* properties_required */
3056   PROP_cfglayout,                       /* properties_provided */
3057   0,                                    /* properties_destroyed */
3058   0,                                    /* todo_flags_start */
3059   0                                     /* todo_flags_finish */
3060  }
3061 };
3062 
3063 struct rtl_opt_pass pass_outof_cfg_layout_mode =
3064 {
3065  {
3066   RTL_PASS,
3067   "outof_cfglayout",                    /* name */
3068   OPTGROUP_NONE,                        /* optinfo_flags */
3069   NULL,                                 /* gate */
3070   outof_cfg_layout_mode,                /* execute */
3071   NULL,                                 /* sub */
3072   NULL,                                 /* next */
3073   0,                                    /* static_pass_number */
3074   TV_CFG,                               /* tv_id */
3075   0,                                    /* properties_required */
3076   0,                                    /* properties_provided */
3077   PROP_cfglayout,                       /* properties_destroyed */
3078   0,                                    /* todo_flags_start */
3079   0                                     /* todo_flags_finish */
3080  }
3081 };
3082 
3083 
3084 /* Link the basic blocks in the correct order, compacting the basic
3085    block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
3086    function also clears the basic block header and footer fields.
3087 
3088    This function is usually called after a pass (e.g. tracer) finishes
3089    some transformations while in cfglayout mode.  The required sequence
3090    of the basic blocks is in a linked list along the bb->aux field.
3091    This functions re-links the basic block prev_bb and next_bb pointers
3092    accordingly, and it compacts and renumbers the blocks.
3093 
3094    FIXME: This currently works only for RTL, but the only RTL-specific
3095    bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
3096    to GIMPLE a long time ago, but it doesn't relink the basic block
3097    chain.  It could do that (to give better initial RTL) if this function
3098    is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */
3099 
3100 void
relink_block_chain(bool stay_in_cfglayout_mode)3101 relink_block_chain (bool stay_in_cfglayout_mode)
3102 {
3103   basic_block bb, prev_bb;
3104   int index;
3105 
3106   /* Maybe dump the re-ordered sequence.  */
3107   if (dump_file)
3108     {
3109       fprintf (dump_file, "Reordered sequence:\n");
3110       for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
3111 	   bb;
3112 	   bb = (basic_block) bb->aux, index++)
3113 	{
3114 	  fprintf (dump_file, " %i ", index);
3115 	  if (get_bb_original (bb))
3116 	    fprintf (dump_file, "duplicate of %i ",
3117 		     get_bb_original (bb)->index);
3118 	  else if (forwarder_block_p (bb)
3119 		   && !LABEL_P (BB_HEAD (bb)))
3120 	    fprintf (dump_file, "compensation ");
3121 	  else
3122 	    fprintf (dump_file, "bb %i ", bb->index);
3123 	  fprintf (dump_file, " [%i]\n", bb->frequency);
3124 	}
3125     }
3126 
3127   /* Now reorder the blocks.  */
3128   prev_bb = ENTRY_BLOCK_PTR;
3129   bb = ENTRY_BLOCK_PTR->next_bb;
3130   for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3131     {
3132       bb->prev_bb = prev_bb;
3133       prev_bb->next_bb = bb;
3134     }
3135   prev_bb->next_bb = EXIT_BLOCK_PTR;
3136   EXIT_BLOCK_PTR->prev_bb = prev_bb;
3137 
3138   /* Then, clean up the aux fields.  */
3139   FOR_ALL_BB (bb)
3140     {
3141       bb->aux = NULL;
3142       if (!stay_in_cfglayout_mode)
3143 	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3144     }
3145 
3146   /* Maybe reset the original copy tables, they are not valid anymore
3147      when we renumber the basic blocks in compact_blocks.  If we are
3148      are going out of cfglayout mode, don't re-allocate the tables.  */
3149   free_original_copy_tables ();
3150   if (stay_in_cfglayout_mode)
3151     initialize_original_copy_tables ();
3152 
3153   /* Finally, put basic_block_info in the new order.  */
3154   compact_blocks ();
3155 }
3156 
3157 
3158 /* Given a reorder chain, rearrange the code to match.  */
3159 
3160 static void
fixup_reorder_chain(void)3161 fixup_reorder_chain (void)
3162 {
3163   basic_block bb;
3164   rtx insn = NULL;
3165 
3166   if (cfg_layout_function_header)
3167     {
3168       set_first_insn (cfg_layout_function_header);
3169       insn = cfg_layout_function_header;
3170       while (NEXT_INSN (insn))
3171 	insn = NEXT_INSN (insn);
3172     }
3173 
3174   /* First do the bulk reordering -- rechain the blocks without regard to
3175      the needed changes to jumps and labels.  */
3176 
3177   for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
3178     {
3179       if (BB_HEADER (bb))
3180 	{
3181 	  if (insn)
3182 	    NEXT_INSN (insn) = BB_HEADER (bb);
3183 	  else
3184 	    set_first_insn (BB_HEADER (bb));
3185 	  PREV_INSN (BB_HEADER (bb)) = insn;
3186 	  insn = BB_HEADER (bb);
3187 	  while (NEXT_INSN (insn))
3188 	    insn = NEXT_INSN (insn);
3189 	}
3190       if (insn)
3191 	NEXT_INSN (insn) = BB_HEAD (bb);
3192       else
3193 	set_first_insn (BB_HEAD (bb));
3194       PREV_INSN (BB_HEAD (bb)) = insn;
3195       insn = BB_END (bb);
3196       if (BB_FOOTER (bb))
3197 	{
3198 	  NEXT_INSN (insn) = BB_FOOTER (bb);
3199 	  PREV_INSN (BB_FOOTER (bb)) = insn;
3200 	  while (NEXT_INSN (insn))
3201 	    insn = NEXT_INSN (insn);
3202 	}
3203     }
3204 
3205   NEXT_INSN (insn) = cfg_layout_function_footer;
3206   if (cfg_layout_function_footer)
3207     PREV_INSN (cfg_layout_function_footer) = insn;
3208 
3209   while (NEXT_INSN (insn))
3210     insn = NEXT_INSN (insn);
3211 
3212   set_last_insn (insn);
3213 #ifdef ENABLE_CHECKING
3214   verify_insn_chain ();
3215 #endif
3216 
3217   /* Now add jumps and labels as needed to match the blocks new
3218      outgoing edges.  */
3219 
3220   for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
3221     {
3222       edge e_fall, e_taken, e;
3223       rtx bb_end_insn;
3224       rtx ret_label = NULL_RTX;
3225       basic_block nb, src_bb;
3226       edge_iterator ei;
3227 
3228       if (EDGE_COUNT (bb->succs) == 0)
3229 	continue;
3230 
3231       /* Find the old fallthru edge, and another non-EH edge for
3232 	 a taken jump.  */
3233       e_taken = e_fall = NULL;
3234 
3235       FOR_EACH_EDGE (e, ei, bb->succs)
3236 	if (e->flags & EDGE_FALLTHRU)
3237 	  e_fall = e;
3238 	else if (! (e->flags & EDGE_EH))
3239 	  e_taken = e;
3240 
3241       bb_end_insn = BB_END (bb);
3242       if (JUMP_P (bb_end_insn))
3243 	{
3244 	  ret_label = JUMP_LABEL (bb_end_insn);
3245 	  if (any_condjump_p (bb_end_insn))
3246 	    {
3247 	      /* This might happen if the conditional jump has side
3248 		 effects and could therefore not be optimized away.
3249 		 Make the basic block to end with a barrier in order
3250 		 to prevent rtl_verify_flow_info from complaining.  */
3251 	      if (!e_fall)
3252 		{
3253 		  gcc_assert (!onlyjump_p (bb_end_insn)
3254 			      || returnjump_p (bb_end_insn));
3255 		  BB_FOOTER (bb) = emit_barrier_after (bb_end_insn);
3256 		  continue;
3257 		}
3258 
3259 	      /* If the old fallthru is still next, nothing to do.  */
3260 	      if (bb->aux == e_fall->dest
3261 		  || e_fall->dest == EXIT_BLOCK_PTR)
3262 		continue;
3263 
3264 	      /* The degenerated case of conditional jump jumping to the next
3265 		 instruction can happen for jumps with side effects.  We need
3266 		 to construct a forwarder block and this will be done just
3267 		 fine by force_nonfallthru below.  */
3268 	      if (!e_taken)
3269 		;
3270 
3271 	      /* There is another special case: if *neither* block is next,
3272 		 such as happens at the very end of a function, then we'll
3273 		 need to add a new unconditional jump.  Choose the taken
3274 		 edge based on known or assumed probability.  */
3275 	      else if (bb->aux != e_taken->dest)
3276 		{
3277 		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
3278 
3279 		  if (note
3280 		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
3281 		      && invert_jump (bb_end_insn,
3282 				      (e_fall->dest == EXIT_BLOCK_PTR
3283 				       ? NULL_RTX
3284 				       : label_for_bb (e_fall->dest)), 0))
3285 		    {
3286 		      e_fall->flags &= ~EDGE_FALLTHRU;
3287 		      gcc_checking_assert (could_fall_through
3288 					   (e_taken->src, e_taken->dest));
3289 		      e_taken->flags |= EDGE_FALLTHRU;
3290 		      update_br_prob_note (bb);
3291 		      e = e_fall, e_fall = e_taken, e_taken = e;
3292 		    }
3293 		}
3294 
3295 	      /* If the "jumping" edge is a crossing edge, and the fall
3296 		 through edge is non-crossing, leave things as they are.  */
3297 	      else if ((e_taken->flags & EDGE_CROSSING)
3298 		       && !(e_fall->flags & EDGE_CROSSING))
3299 		continue;
3300 
3301 	      /* Otherwise we can try to invert the jump.  This will
3302 		 basically never fail, however, keep up the pretense.  */
3303 	      else if (invert_jump (bb_end_insn,
3304 				    (e_fall->dest == EXIT_BLOCK_PTR
3305 				     ? NULL_RTX
3306 				     : label_for_bb (e_fall->dest)), 0))
3307 		{
3308 		  e_fall->flags &= ~EDGE_FALLTHRU;
3309 		  gcc_checking_assert (could_fall_through
3310 				       (e_taken->src, e_taken->dest));
3311 		  e_taken->flags |= EDGE_FALLTHRU;
3312 		  update_br_prob_note (bb);
3313 		  if (LABEL_NUSES (ret_label) == 0
3314 		      && single_pred_p (e_taken->dest))
3315 		    delete_insn (ret_label);
3316 		  continue;
3317 		}
3318 	    }
3319 	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3320 	    {
3321 	      /* If the old fallthru is still next or if
3322 		 asm goto doesn't have a fallthru (e.g. when followed by
3323 		 __builtin_unreachable ()), nothing to do.  */
3324 	      if (! e_fall
3325 		  || bb->aux == e_fall->dest
3326 		  || e_fall->dest == EXIT_BLOCK_PTR)
3327 		continue;
3328 
3329 	      /* Otherwise we'll have to use the fallthru fixup below.  */
3330 	    }
3331 	  else
3332 	    {
3333 	      /* Otherwise we have some return, switch or computed
3334 		 jump.  In the 99% case, there should not have been a
3335 		 fallthru edge.  */
3336 	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3337 	      continue;
3338 	    }
3339 	}
3340       else
3341 	{
3342 	  /* No fallthru implies a noreturn function with EH edges, or
3343 	     something similarly bizarre.  In any case, we don't need to
3344 	     do anything.  */
3345 	  if (! e_fall)
3346 	    continue;
3347 
3348 	  /* If the fallthru block is still next, nothing to do.  */
3349 	  if (bb->aux == e_fall->dest)
3350 	    continue;
3351 
3352 	  /* A fallthru to exit block.  */
3353 	  if (e_fall->dest == EXIT_BLOCK_PTR)
3354 	    continue;
3355 	}
3356 
3357       /* We got here if we need to add a new jump insn.
3358 	 Note force_nonfallthru can delete E_FALL and thus we have to
3359 	 save E_FALL->src prior to the call to force_nonfallthru.  */
3360       src_bb = e_fall->src;
3361       nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3362       if (nb)
3363 	{
3364 	  nb->aux = bb->aux;
3365 	  bb->aux = nb;
3366 	  /* Don't process this new block.  */
3367 	  bb = nb;
3368 
3369 	  /* Make sure new bb is tagged for correct section (same as
3370 	     fall-thru source, since you cannot fall-thru across
3371 	     section boundaries).  */
3372 	  BB_COPY_PARTITION (src_bb, single_pred (bb));
3373 	  if (flag_reorder_blocks_and_partition
3374 	      && targetm_common.have_named_sections
3375 	      && JUMP_P (BB_END (bb))
3376 	      && !any_condjump_p (BB_END (bb))
3377 	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
3378 	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
3379 	}
3380     }
3381 
3382   relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3383 
3384   /* Annoying special case - jump around dead jumptables left in the code.  */
3385   FOR_EACH_BB (bb)
3386     {
3387       edge e = find_fallthru_edge (bb->succs);
3388 
3389       if (e && !can_fallthru (e->src, e->dest))
3390 	force_nonfallthru (e);
3391     }

  /* Ensure that the goto_locus on each edge is carried by some
     instruction with that locus in the RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
        edge e;
        edge_iterator ei;

        FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx end;

	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR)
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well; this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

DEBUG_FUNCTION void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}
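
/* For illustration only (not part of the original file): a typical call
   site for the checker above.  A pass that relinks the insn chain can
   bracket its work like this under ENABLE_CHECKING, which is exactly
   what cfg_layout_finalize does below; my_relink_pass is a hypothetical
   name.  */
#if 0
static void
my_relink_pass (void)
{
  /* ... rearrange insns with NEXT_INSN/PREV_INSN surgery ...  */
#ifdef ENABLE_CHECKING
  verify_insn_chain ();	/* Assert next/prev consistency and counts.  */
  verify_flow_info ();	/* Assert the CFG still matches the insns.  */
#endif
}
#endif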

/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e)
    bb = e->src;

  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      while (c->aux)
	c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}
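
/* For illustration only (not part of the original file): in cfglayout
   mode the intended linear order of the blocks is threaded through the
   bb->aux pointers starting at ENTRY_BLOCK_PTR->next_bb, which is the
   chain the function above splices.  A sketch of walking that order:  */
#if 0
static void
dump_layout_order (FILE *f)
{
  basic_block bb;

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    fprintf (f, " %i", bb->index);
  fprintf (f, "\n");
}
#endif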

/* If the exit block has more than one fallthru predecessor, ensure that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}

/* Return true if it is possible to duplicate the basic block BB.  */

static bool
cfg_layout_can_duplicate_bb_p (const_basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing the jump destination may be hoisted outside the basic
     block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
	{
	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
	    return false;
	  if (insn == BB_END (bb))
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  return true;
}

/* Duplicate the insns FROM through TO at the end of the insn chain and
   return the first insn of the copy.  */

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating the boundaries of the previous basic block.  The
     note will get removed from the insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create the copy at the end of the insn chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from the original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      /* Avoid copying the following barrier as well, if any
		 (and debug insns in between).  */
	      rtx next;

	      for (next = NEXT_INSN (insn);
		   next != NEXT_INSN (to);
		   next = NEXT_INSN (next))
		if (!DEBUG_INSN_P (next))
		  break;
	      if (next != NEXT_INSN (to) && BARRIER_P (next))
		insn = next;
	      break;
	    }
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case CODE_LABEL:
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case the prologue is empty and the function contains a
		 label in its first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just a single entry to the function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
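
/* For illustration only (not part of the original file): passes do not
   normally call the hook above directly; they go through the cfghooks
   wrapper duplicate_block (cfghooks.c), which also redirects edge E to
   the copy and updates counts.  A hedged sketch:  */
#if 0
static basic_block
copy_block_example (basic_block bb, edge e)
{
  gcc_assert (can_duplicate_block_p (bb));
  /* Place the copy after BB itself; in cfglayout mode duplicate_block
     dispatches to cfg_layout_duplicate_bb above.  */
  return duplicate_block (bb, e, bb);
}
#endif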


/* Main entry point to this module - initialize the data structures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx x;
  basic_block bb;

  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non-local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
    {
      bb = BLOCK_FOR_INSN (XEXP (x, 0));
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
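
/* For illustration only (not part of the original file): the usual
   bracketing in an RTL pass that wants to reorder blocks by rewriting
   the bb->aux chains; block-reordering passes follow this shape.  */
#if 0
static void
reorder_pass_skeleton (void)
{
  basic_block bb;

  cfg_layout_initialize (0);
  FOR_EACH_BB (bb)
    {
      /* ... decide the new order and thread it through bb->aux ...  */
    }
  cfg_layout_finalize ();	/* Re-linearize the insn chain.  */
}
#endif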

/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  bitmap_clear (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
	bb->flags &= ~BB_SUPERBLOCK;
	bitmap_set_bit (superblocks, bb->index);
	need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder the insn list according to the sequence
   specified by the aux pointers, enter compensation code, rebuild the
   scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}


/* Same as split_block but update cfg_layout structures.  */

static basic_block
cfg_layout_split_block (basic_block bb, void *insnp)
{
  rtx insn = (rtx) insnp;
  basic_block new_bb = rtl_split_block (bb, insn);

  BB_FOOTER (new_bb) = BB_FOOTER (bb);
  BB_FOOTER (bb) = NULL;

  return new_bb;
}

/* Redirect edge E to DEST.  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* redirect_edge_and_branch may decide to turn a branch into a fallthru
     edge when the basic blocks appear to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  redirected->flags |= EDGE_FALLTHRU;
	  df_set_bb_dirty (redirected->src);
	  return redirected;
	}
      /* In case we are redirecting a fallthru edge to the branch edge
	 of a conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      if (dump_file)
	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
		 e->src->index, e->dest->index, dest->index);
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
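
/* For illustration only (not part of the original file): callers use the
   cfghooks wrappers rather than the static function above.  The wrapper
   returns NULL when the edge cannot be redirected in place, and callers
   then fall back to the _force variant (the wrapper for the function
   that follows).  */
#if 0
static void
retarget_example (edge e, basic_block new_dest)
{
  if (!redirect_edge_and_branch (e, new_dest))
    redirect_edge_and_branch_force (e, new_dest);
}
#endif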

/* Simple wrapper, as we can always redirect fallthru edges.  */
static basic_block
cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);

  gcc_assert (redirected);
  return NULL;
}

/* Same as delete_basic_block but update cfg_layout structures.  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaining;

  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
	NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      if (BB_FOOTER (bb))
	{
	  insn = BB_END (bb);
	  NEXT_INSN (insn) = BB_FOOTER (bb);
	  PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  NEXT_INSN (insn) = next;
	  if (next)
	    PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      remaining = unlink_insn_chain (prev, next);
      insn = remaining;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
	PREV_INSN (*to) = insn;
      *to = remaining;
    }
}

/* Return true when blocks A and B can be safely merged.  */

static bool
cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* If we would end up moving B's instructions, make sure it doesn't fall
     through into the exit block, since we cannot recover from a fallthrough
     edge into the exit block occurring in the middle of a function.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      edge e = find_fallthru_edge (b->succs);
      if (e && e->dest == EXIT_BLOCK_PTR)
	return false;
    }

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be a simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
	  /* If the jump insn has side effects, we can't kill the edge.
	     When not optimizing, try_redirect_by_replacing_jump will
	     not allow us to redirect an edge by replacing a table jump.  */
	  && (!JUMP_P (BB_END (a))
	      || ((!optimize || reload_completed)
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
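
/* For illustration only (not part of the original file): the predicate
   above is reached through the cfghooks pair can_merge_blocks_p /
   merge_blocks, so caller-side code typically looks like this.  */
#if 0
static void
try_merge_with_successor (basic_block a)
{
  if (single_succ_p (a)
      && can_merge_blocks_p (a, single_succ (a)))
    merge_blocks (a, single_succ (a));
}
#endif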

/* Merge block A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
			 a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have a fallthru edge in A, or we can do dummy redirection to
     get it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing the CFG and the edge is the only place in RTL which
     holds some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Possible line number notes should appear in between.  */
  if (BB_HEADER (b))
    {
      rtx first = BB_END (a), last;

      last = emit_insn_after_noloc (BB_HEADER (b), BB_END (a), a);
      /* The above might add a BARRIER as BB_END, but as barriers
	 aren't valid parts of a bb, remove_insn doesn't update
	 BB_END if it is a barrier.  So adjust BB_END here.  */
      while (BB_END (a) != first && BARRIER_P (BB_END (a)))
	BB_END (a) = PREV_INSN (BB_END (a));
      delete_insn_chain (NEXT_INSN (first), last, false);
      BB_HEADER (b) = NULL;
    }

  /* In case the basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to call it explicitly.  */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  BB_HEAD (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* Possible tablejumps and barriers should appear after the block.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  rtx last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = BB_FOOTER (b);
	  PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}

/* Split edge E.  */

static basic_block
cfg_layout_split_edge (edge e)
{
  basic_block new_bb =
    create_basic_block (e->src != ENTRY_BLOCK_PTR
			? NEXT_INSN (BB_END (e->src)) : get_insns (),
			NULL_RTX, e->src);

  if (e->dest == EXIT_BLOCK_PTR)
    BB_COPY_PARTITION (new_bb, e->src);
  else
    BB_COPY_PARTITION (new_bb, e->dest);
  make_edge (new_bb, e->dest, EDGE_FALLTHRU);
  redirect_edge_and_branch_force (e, new_bb);

  return new_bb;
}

/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}

/* Return true if BB contains only labels or non-executable
   instructions.  */

static bool
rtl_block_empty_p (basic_block bb)
{
  rtx insn;

  if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
    return true;

  FOR_BB_INSNS (bb, insn)
    if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
      return false;

  return true;
}

/* Split a basic block if it ends with a conditional branch and if
   the other part of the block is not empty.  */

static basic_block
rtl_split_block_before_cond_jump (basic_block bb)
{
  rtx insn;
  rtx split_point = NULL;
  rtx last = NULL;
  bool found_code = false;

  FOR_BB_INSNS (bb, insn)
    {
      if (any_condjump_p (insn))
	split_point = last;
      else if (NONDEBUG_INSN_P (insn))
	found_code = true;
      last = insn;
    }

  /* Only split if we found both real code and a condjump to split
     before.  */
  if (found_code && split_point)
    return split_block (bb, split_point)->dest;
  else
    return NULL;
}

/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call, false otherwise.  */

static bool
rtl_block_ends_with_call_p (basic_block bb)
{
  rtx insn = BB_END (bb);

  while (!CALL_P (insn)
	 && insn != BB_HEAD (bb)
	 && (keep_with_call_p (insn)
	     || NOTE_P (insn)
	     || DEBUG_INSN_P (insn)))
    insn = PREV_INSN (insn);
  return (CALL_P (insn));
}

/* Return true if BB ends with a conditional branch, false otherwise.  */

static bool
rtl_block_ends_with_condjump_p (const_basic_block bb)
{
  return any_condjump_p (BB_END (bb));
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for rtl_flow_call_edges_add.  */

static bool
need_fake_edge_p (const_rtx insn)
{
  if (!INSN_P (insn))
    return false;

  if (CALL_P (insn)
      && !SIBLING_CALL_P (insn)
      && !find_reg_note (insn, REG_NORETURN, NULL)
      && !RTL_CONST_OR_PURE_CALL_P (insn))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	   && MEM_VOLATILE_P (PATTERN (insn)))
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && asm_noperands (insn) != -1
	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and non-noreturn
   calls (and volatile inline assembly) in the bitmap of blocks specified by
   BLOCKS, or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non-constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in the CFG already.  Calling make_edge in such a case
		 would cause us to mark that edge as fake and remove it
		 later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR);
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
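
/* For illustration only (not part of the original file): analyses reach
   the function above through the cfghooks wrapper flow_call_edges_add,
   and pair it with remove_fake_edges from cfganal.c once they are
   done.  A hedged sketch:  */
#if 0
static void
analysis_needing_fake_edges (void)
{
  /* NULL means: consider every basic block.  */
  int blocks_split = flow_call_edges_add (NULL);

  /* ... run an analysis that must not assume a block, once entered,
     always runs to its end ...  */

  remove_fake_edges ();
  if (blocks_split)
    verify_flow_info ();
}
#endif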

/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target, SECOND_HEAD the fall-thru target.
   SECOND_HEAD does not need to be handled here (the loop versioning
   code takes care of it); it is in the signature only because the
   tree-level version of this hook needs it and both must share the
   same prototype.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx label, seq, jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  enum machine_mode mode;

  label = block_label (first_head);
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
			   mode, NULL_RTX, NULL_RTX, label, -1);
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Emit the new condition at the end of COND_BB.  */
  emit_insn_after (seq, BB_END (cond_bb));
}


/* Given a block B with a conditional branch at its end, store the branch
   edge and the fall-thru edge in BRANCH_EDGE and FALLTHRU_EDGE
   respectively.  */
static void
rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
			   edge *fallthru_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_FALLTHRU)
    {
      *fallthru_edge = e;
      *branch_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *branch_edge = e;
      *fallthru_edge = EDGE_SUCC (b, 1);
    }
}

void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
}

/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
rtl_can_remove_branch_p (const_edge e)
{
  const_basic_block src = e->src;
  const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
  const_rtx insn = BB_END (src), set;

  /* The conditions are taken from try_redirect_by_replacing_jump.  */
  if (target == EXIT_BLOCK_PTR)
    return false;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
      || BB_PARTITION (src) != BB_PARTITION (target))
    return false;

  if (!onlyjump_p (insn)
      || tablejump_p (insn, NULL, NULL))
    return false;

  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  return true;
}

static basic_block
rtl_duplicate_bb (basic_block bb)
{
  bb = cfg_layout_duplicate_bb (bb);
  bb->aux = NULL;
  return bb;
}

/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
   then do post-pass accounting.  Store the counting in RECORD.  */
static void
rtl_account_profile_record (basic_block bb, int after_pass,
			    struct profile_record *record)
{
  rtx insn;
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	record->size[after_pass]
	  += insn_rtx_cost (PATTERN (insn), false);
	if (profile_status == PROFILE_READ)
	  record->time[after_pass]
	    += insn_rtx_cost (PATTERN (insn), true) * bb->count;
	else if (profile_status == PROFILE_GUESSED)
	  record->time[after_pass]
	    += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
      }
}

/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  rtl_duplicate_bb,
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
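
/* For illustration only (not part of the original file): a pass selects
   between the two hook tables in this file with the registration
   helpers from cfghooks.c; cfg_layout_initialize above does the same
   thing when entering cfglayout mode.  */
#if 0
static void
select_rtl_cfg_hooks (bool layout_mode)
{
  if (layout_mode)
    cfg_layout_rtl_register_cfg_hooks ();	/* cfg_layout_rtl_cfg_hooks */
  else
    rtl_register_cfg_hooks ();			/* rtl_cfg_hooks */
}
#endif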

/* Implementation of CFG manipulation for cfg layout RTL, where
   basic blocks connected via fallthru edges do not have to be adjacent.
   This representation will hopefully become the default one in a future
   version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};

#include "gt-cfgrtl.h"