1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains low-level functions to manipulate and analyze the CFG
21 that are aware of the RTL intermediate language.
22
23 Available functionality:
24 - Basic CFG/RTL manipulation API documented in cfghooks.h
25 - CFG-aware instruction chain manipulation
26 delete_insn, delete_insn_chain
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
33
34 Functions not intended for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
39
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "backend.h"
44 #include "target.h"
45 #include "rtl.h"
46 #include "tree.h"
47 #include "cfghooks.h"
48 #include "df.h"
49 #include "insn-config.h"
50 #include "memmodel.h"
51 #include "emit-rtl.h"
52 #include "cfgrtl.h"
53 #include "cfganal.h"
54 #include "cfgbuild.h"
55 #include "cfgcleanup.h"
56 #include "bb-reorder.h"
57 #include "rtl-error.h"
58 #include "insn-attr.h"
59 #include "dojump.h"
60 #include "expr.h"
61 #include "cfgloop.h"
62 #include "tree-pass.h"
63 #include "print-rtl.h"
64
65 /* Disable warnings about missing quoting in GCC diagnostics. */
66 #if __GNUC__ >= 10
67 # pragma GCC diagnostic push
68 # pragma GCC diagnostic ignored "-Wformat-diag"
69 #endif
70
71 /* Holds the interesting leading and trailing notes for the function.
72 Only applicable if the CFG is in cfglayout mode. */
73 static GTY(()) rtx_insn *cfg_layout_function_footer;
74 static GTY(()) rtx_insn *cfg_layout_function_header;
75
76 static rtx_insn *skip_insns_after_block (basic_block);
77 static void record_effective_endpoints (void);
78 static void fixup_reorder_chain (void);
79
80 void verify_insn_chain (void);
81 static void fixup_fallthru_exit_predecessor (void);
82 static int can_delete_note_p (const rtx_note *);
83 static int can_delete_label_p (const rtx_code_label *);
84 static basic_block rtl_split_edge (edge);
85 static bool rtl_move_block_after (basic_block, basic_block);
86 static int rtl_verify_flow_info (void);
87 static basic_block cfg_layout_split_block (basic_block, void *);
88 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
89 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
90 static void cfg_layout_delete_block (basic_block);
91 static void rtl_delete_block (basic_block);
92 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
93 static edge rtl_redirect_edge_and_branch (edge, basic_block);
94 static basic_block rtl_split_block (basic_block, void *);
95 static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
96 static int rtl_verify_flow_info_1 (void);
97 static void rtl_make_forwarder_block (edge);
98
99 /* Return true if NOTE is not one of the ones that must be kept paired,
100 so that we may simply delete it. */
101
102 static int
103 can_delete_note_p (const rtx_note *note)
104 {
105 switch (NOTE_KIND (note))
106 {
107 case NOTE_INSN_DELETED:
108 case NOTE_INSN_BASIC_BLOCK:
109 case NOTE_INSN_EPILOGUE_BEG:
110 return true;
111
112 default:
113 return false;
114 }
115 }
116
117 /* True if a given label can be deleted. */
118
119 static int
120 can_delete_label_p (const rtx_code_label *label)
121 {
122 return (!LABEL_PRESERVE_P (label)
123 /* User declared labels must be preserved. */
124 && LABEL_NAME (label) == 0
125 && !vec_safe_contains<rtx_insn *> (forced_labels,
126 const_cast<rtx_code_label *> (label)));
127 }
128
129 /* Delete INSN by patching it out. */
130
131 void
132 delete_insn (rtx_insn *insn)
133 {
134 rtx note;
135 bool really_delete = true;
136
137 if (LABEL_P (insn))
138 {
139 /* Some labels can't be directly removed from the INSN chain, as they
140 might be referenced from variables, the constant pool, etc.
141 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
142 if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
143 {
144 const char *name = LABEL_NAME (insn);
145 basic_block bb = BLOCK_FOR_INSN (insn);
146 rtx_insn *bb_note = NEXT_INSN (insn);
147
148 really_delete = false;
149 PUT_CODE (insn, NOTE);
150 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
151 NOTE_DELETED_LABEL_NAME (insn) = name;
152
153 /* If the note following the label starts a basic block, and the
154 label is a member of the same basic block, interchange the two. */
155 if (bb_note != NULL_RTX
156 && NOTE_INSN_BASIC_BLOCK_P (bb_note)
157 && bb != NULL
158 && bb == BLOCK_FOR_INSN (bb_note))
159 {
160 reorder_insns_nobb (insn, insn, bb_note);
161 BB_HEAD (bb) = bb_note;
162 if (BB_END (bb) == bb_note)
163 BB_END (bb) = insn;
164 }
165 }
166
167 remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
168 }
169
170 if (really_delete)
171 {
172 /* If this insn has already been deleted, something is very wrong. */
173 gcc_assert (!insn->deleted ());
174 if (INSN_P (insn))
175 df_insn_delete (insn);
176 remove_insn (insn);
177 insn->set_deleted ();
178 }
179
180 /* If deleting a jump, decrement the use count of the label. Deleting
181 the label itself should happen in the normal course of block merging. */
182 if (JUMP_P (insn))
183 {
184 if (JUMP_LABEL (insn)
185 && LABEL_P (JUMP_LABEL (insn)))
186 LABEL_NUSES (JUMP_LABEL (insn))--;
187
188 /* If there are more targets, remove them too. */
189 while ((note
190 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
191 && LABEL_P (XEXP (note, 0)))
192 {
193 LABEL_NUSES (XEXP (note, 0))--;
194 remove_note (insn, note);
195 }
196 }
197
198 /* Also if deleting any insn that references a label as an operand. */
199 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
200 && LABEL_P (XEXP (note, 0)))
201 {
202 LABEL_NUSES (XEXP (note, 0))--;
203 remove_note (insn, note);
204 }
205
206 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
207 {
208 rtvec vec = table->get_labels ();
209 int len = GET_NUM_ELEM (vec);
210 int i;
211
212 for (i = 0; i < len; i++)
213 {
214 rtx label = XEXP (RTVEC_ELT (vec, i), 0);
215
216 /* When deleting code in bulk (e.g. removing many unreachable
217 blocks) we can delete a label that's a target of the vector
218 before deleting the vector itself. */
219 if (!NOTE_P (label))
220 LABEL_NUSES (label)--;
221 }
222 }
223 }
224
225 /* Like delete_insn but also purge dead edges from BB.
226 Return true if any edges are eliminated. */
227
228 bool
229 delete_insn_and_edges (rtx_insn *insn)
230 {
231 bool purge = false;
232
233 if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
234 {
235 basic_block bb = BLOCK_FOR_INSN (insn);
236 if (BB_END (bb) == insn)
237 purge = true;
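      /* If INSN is followed only by debug insns up to the end of its block,
         deleting it removes the last "real" insn of the block, so dead edges
         may need purging just as if INSN were BB_END itself.  */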
238 else if (DEBUG_INSN_P (BB_END (bb)))
239 for (rtx_insn *dinsn = NEXT_INSN (insn);
240 DEBUG_INSN_P (dinsn); dinsn = NEXT_INSN (dinsn))
241 if (BB_END (bb) == dinsn)
242 {
243 purge = true;
244 break;
245 }
246 }
247 delete_insn (insn);
248 if (purge)
249 return purge_dead_edges (BLOCK_FOR_INSN (insn));
250 return false;
251 }
252
253 /* Unlink a chain of insns between START and FINISH, leaving notes
254 that must be paired. If CLEAR_BB is true, we set the bb field to NULL
255 for insns that cannot be removed. */
256
257 void
258 delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
259 {
260 /* Unchain the insns one by one. It would be quicker to delete all of these
261 with a single unchaining, rather than one at a time, but we need to keep
262 the NOTE's. */
263 rtx_insn *current = finish;
264 while (1)
265 {
266 rtx_insn *prev = PREV_INSN (current);
267 if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
268 ;
269 else
270 delete_insn (current);
271
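      /* If CURRENT survived deletion (a note that must be kept, or a label
         converted to NOTE_INSN_DELETED_LABEL above), optionally detach it
         from its soon-to-be-deleted basic block.  */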
272 if (clear_bb && !current->deleted ())
273 set_block_for_insn (current, NULL);
274
275 if (current == start)
276 break;
277 current = prev;
278 }
279 }
280
281 /* Create a new basic block consisting of the instructions between HEAD and END
282 inclusive. This function is designed to allow fast BB construction - it
283 reuses the note and basic block struct in BB_NOTE, if any, does not grow
284 the BASIC_BLOCK chain, and should be used directly only by CFG construction
285 code. END can be NULL to create a new empty basic block before HEAD.
286 Both END and HEAD can be NULL to create a basic block at the end of the
287 INSN chain. AFTER is the basic block the new block should be put after. */
288
289 basic_block
290 create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
291 basic_block after)
292 {
293 basic_block bb;
294
295 if (bb_note
296 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
297 && bb->aux == NULL)
298 {
299 /* If we found an existing note, thread it back onto the chain. */
300
301 rtx_insn *after;
302
303 if (LABEL_P (head))
304 after = head;
305 else
306 {
307 after = PREV_INSN (head);
308 head = bb_note;
309 }
310
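      /* Unless the existing basic block note already is AFTER or immediately
         follows it, move the note to just after AFTER so it stays at the head
         of the block.  */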
311 if (after != bb_note && NEXT_INSN (after) != bb_note)
312 reorder_insns_nobb (bb_note, bb_note, after);
313 }
314 else
315 {
316 /* Otherwise we must create a note and a basic block structure. */
317
318 bb = alloc_block ();
319
320 init_rtl_bb_info (bb);
321 if (!head && !end)
322 head = end = bb_note
323 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
324 else if (LABEL_P (head) && end)
325 {
326 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
327 if (head == end)
328 end = bb_note;
329 }
330 else
331 {
332 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
333 head = bb_note;
334 if (!end)
335 end = head;
336 }
337
338 NOTE_BASIC_BLOCK (bb_note) = bb;
339 }
340
341 /* Always include the bb note in the block. */
342 if (NEXT_INSN (end) == bb_note)
343 end = bb_note;
344
345 BB_HEAD (bb) = head;
346 BB_END (bb) = end;
347 bb->index = last_basic_block_for_fn (cfun)++;
348 bb->flags = BB_NEW | BB_RTL;
349 link_block (bb, after);
350 SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
351 df_bb_refs_record (bb->index, false);
352 update_bb_for_insn (bb);
353 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
354
355 /* Tag the block so that we know it has been used when considering
356 other basic block notes. */
357 bb->aux = bb;
358
359 return bb;
360 }
361
362 /* Create a new basic block consisting of the instructions between HEAD and
363 END and place it in the BB chain after block AFTER. END can be NULL to
364 create a new empty basic block before HEAD. Both END and HEAD can be NULL
365 to create a basic block at the end of the INSN chain. */
366
367 static basic_block
368 rtl_create_basic_block (void *headp, void *endp, basic_block after)
369 {
370 rtx_insn *head = (rtx_insn *) headp;
371 rtx_insn *end = (rtx_insn *) endp;
372 basic_block bb;
373
374 /* Grow the basic block array if needed. */
375 if ((size_t) last_basic_block_for_fn (cfun)
376 >= basic_block_info_for_fn (cfun)->length ())
377 {
378 size_t new_size =
379 (last_basic_block_for_fn (cfun)
380 + (last_basic_block_for_fn (cfun) + 3) / 4);
381 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
382 }
383
384 n_basic_blocks_for_fn (cfun)++;
385
386 bb = create_basic_block_structure (head, end, NULL, after);
387 bb->aux = NULL;
388 return bb;
389 }
390
391 static basic_block
392 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
393 {
394 basic_block newbb = rtl_create_basic_block (head, end, after);
395
396 return newbb;
397 }
398
399 /* Delete the insns in a (non-live) block. We physically delete every
400 non-deleted-note insn, and update the flow graph appropriately. */
401
402
403
404 /* ??? Preserving all such notes strikes me as wrong. It would be nice
405 to post-process the stream to remove empty blocks, loops, ranges, etc. */
406
407 static void
408 rtl_delete_block (basic_block b)
409 {
410 rtx_insn *insn, *end;
411
412 /* If the head of this block is a CODE_LABEL, then it might be the
413 label for an exception handler which can't be reached. We need
414 to remove the label from the exception_handler_label list. */
415 insn = BB_HEAD (b);
416
417 end = get_last_bb_insn (b);
418
419 /* Selectively delete the entire chain. */
420 BB_HEAD (b) = NULL;
421 delete_insn_chain (insn, end, true);
422
423
424 if (dump_file)
425 fprintf (dump_file, "deleting block %d\n", b->index);
426 df_bb_delete (b->index);
427 }
428
429 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
430
431 void
432 compute_bb_for_insn (void)
433 {
434 basic_block bb;
435
436 FOR_EACH_BB_FN (bb, cfun)
437 {
438 rtx_insn *end = BB_END (bb);
439 rtx_insn *insn;
440
441 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
442 {
443 BLOCK_FOR_INSN (insn) = bb;
444 if (insn == end)
445 break;
446 }
447 }
448 }
449
450 /* Release the basic_block_for_insn array. */
451
452 unsigned int
453 free_bb_for_insn (void)
454 {
455 rtx_insn *insn;
456 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
457 if (!BARRIER_P (insn))
458 BLOCK_FOR_INSN (insn) = NULL;
459 return 0;
460 }
461
462 namespace {
463
464 const pass_data pass_data_free_cfg =
465 {
466 RTL_PASS, /* type */
467 "*free_cfg", /* name */
468 OPTGROUP_NONE, /* optinfo_flags */
469 TV_NONE, /* tv_id */
470 0, /* properties_required */
471 0, /* properties_provided */
472 PROP_cfg, /* properties_destroyed */
473 0, /* todo_flags_start */
474 0, /* todo_flags_finish */
475 };
476
477 class pass_free_cfg : public rtl_opt_pass
478 {
479 public:
480 pass_free_cfg (gcc::context *ctxt)
481 : rtl_opt_pass (pass_data_free_cfg, ctxt)
482 {}
483
484 /* opt_pass methods: */
485 virtual unsigned int execute (function *);
486
487 }; // class pass_free_cfg
488
489 unsigned int
490 pass_free_cfg::execute (function *)
491 {
492 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
493 valid at that point so it would be too late to call df_analyze. */
494 if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
495 {
496 df_note_add_problem ();
497 df_analyze ();
498 }
499
500 if (crtl->has_bb_partition)
501 insert_section_boundary_note ();
502
503 free_bb_for_insn ();
504 return 0;
505 }
506
507 } // anon namespace
508
509 rtl_opt_pass *
510 make_pass_free_cfg (gcc::context *ctxt)
511 {
512 return new pass_free_cfg (ctxt);
513 }
514
515 /* Return the insn after which to emit code at the entry of the function. */
516 rtx_insn *
517 entry_of_function (void)
518 {
519 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
520 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
521 }
522
523 /* Emit INSN at the entry point of the function, ensuring that it is only
524 executed once per function. */
525 void
526 emit_insn_at_entry (rtx insn)
527 {
528 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
529 edge e = ei_safe_edge (ei);
530 gcc_assert (e->flags & EDGE_FALLTHRU);
531
532 insert_insn_on_edge (insn, e);
533 commit_edge_insertions ();
534 }
535
536 /* Update BLOCK_FOR_INSN of the insns between BEGIN and END
537 (skipping any BARRIERs) and notify df of the bb change.
538 The insn chain range is inclusive
539 (i.e. both BEGIN and END will be updated). */
540
541 static void
542 update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
543 {
544 rtx_insn *insn;
545
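  /* Make END point one past the last insn so the loop below can use a
     simple inequality test.  */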
546 end = NEXT_INSN (end);
547 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
548 if (!BARRIER_P (insn))
549 df_insn_change_bb (insn, bb);
550 }
551
552 /* Update BLOCK_FOR_INSN of insns in BB to BB,
553 and notify df of the change. */
554
555 void
556 update_bb_for_insn (basic_block bb)
557 {
558 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
559 }
560
561
562 /* Like active_insn_p, except keep the return value use or clobber around
563 even after reload. */
564
565 static bool
566 flow_active_insn_p (const rtx_insn *insn)
567 {
568 if (active_insn_p (insn))
569 return true;
570
571 /* A clobber of the function return value exists for buggy
572 programs that fail to return a value. Its effect is to
573 keep the return value from being live across the entire
574 function. If we allow it to be skipped, we introduce the
575 possibility for register lifetime confusion.
576 Similarly, keep a USE of the function return value, otherwise
577 the USE is dropped and we could fail to thread jump if USE
578 appears on some paths and not on others, see PR90257. */
579 if ((GET_CODE (PATTERN (insn)) == CLOBBER
580 || GET_CODE (PATTERN (insn)) == USE)
581 && REG_P (XEXP (PATTERN (insn), 0))
582 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
583 return true;
584
585 return false;
586 }
587
588 /* Return true if the block has no effect and only forwards control flow to
589 its single destination. */
590
591 bool
592 contains_no_active_insn_p (const_basic_block bb)
593 {
594 rtx_insn *insn;
595
596 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
597 || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
598 || !single_succ_p (bb)
599 || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
600 return false;
601
602 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
603 if (INSN_P (insn) && flow_active_insn_p (insn))
604 return false;
605
606 return (!INSN_P (insn)
607 || (JUMP_P (insn) && simplejump_p (insn))
608 || !flow_active_insn_p (insn));
609 }
610
611 /* Likewise, but protect loop latches, headers and preheaders. */
612 /* FIXME: Make this a cfg hook. */
613
614 bool
615 forwarder_block_p (const_basic_block bb)
616 {
617 if (!contains_no_active_insn_p (bb))
618 return false;
619
620 /* Protect loop latches, headers and preheaders. */
621 if (current_loops)
622 {
623 basic_block dest;
624 if (bb->loop_father->header == bb)
625 return false;
626 dest = EDGE_SUCC (bb, 0)->dest;
627 if (dest->loop_father->header == dest)
628 return false;
629 }
630
631 return true;
632 }
633
634 /* Return true if we can reach TARGET from SRC by falling through. */
635 /* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
636
637 bool
638 can_fallthru (basic_block src, basic_block target)
639 {
640 rtx_insn *insn = BB_END (src);
641 rtx_insn *insn2;
642 edge e;
643 edge_iterator ei;
644
645 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
646 return true;
647 if (src->next_bb != target)
648 return false;
649
650 /* ??? Later we may add code to move jump tables offline. */
651 if (tablejump_p (insn, NULL, NULL))
652 return false;
653
654 FOR_EACH_EDGE (e, ei, src->succs)
655 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
656 && e->flags & EDGE_FALLTHRU)
657 return false;
658
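  /* Falling through is only possible if there is no active insn between the
     end of SRC and the first active insn of TARGET.  */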
659 insn2 = BB_HEAD (target);
660 if (!active_insn_p (insn2))
661 insn2 = next_active_insn (insn2);
662
663 return next_active_insn (insn) == insn2;
664 }
665
666 /* Return true if we could reach TARGET from SRC by falling through,
667 if the target were made adjacent. If we already have a fall-through
668 edge to the exit block, we can't do that. */
669 static bool
670 could_fall_through (basic_block src, basic_block target)
671 {
672 edge e;
673 edge_iterator ei;
674
675 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
676 return true;
677 FOR_EACH_EDGE (e, ei, src->succs)
678 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
679 && e->flags & EDGE_FALLTHRU)
680 return false;
681 return true;
682 }
683
684 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
685 rtx_note *
686 bb_note (basic_block bb)
687 {
688 rtx_insn *note;
689
690 note = BB_HEAD (bb);
691 if (LABEL_P (note))
692 note = NEXT_INSN (note);
693
694 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
695 return as_a <rtx_note *> (note);
696 }
697
698 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
699 note associated with the BLOCK. */
700
701 static rtx_insn *
702 first_insn_after_basic_block_note (basic_block block)
703 {
704 rtx_insn *insn;
705
706 /* Get the first instruction in the block. */
707 insn = BB_HEAD (block);
708
709 if (insn == NULL_RTX)
710 return NULL;
711 if (LABEL_P (insn))
712 insn = NEXT_INSN (insn);
713 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
714
715 return NEXT_INSN (insn);
716 }
717
718 /* Create a new basic block just after basic block BB by splitting
719 everything after the specified instruction INSNP. */
720
721 static basic_block
722 rtl_split_block (basic_block bb, void *insnp)
723 {
724 basic_block new_bb;
725 rtx_insn *insn = (rtx_insn *) insnp;
726 edge e;
727 edge_iterator ei;
728
729 if (!insn)
730 {
731 insn = first_insn_after_basic_block_note (bb);
732
733 if (insn)
734 {
735 rtx_insn *next = insn;
736
737 insn = PREV_INSN (insn);
738
739 /* If the block contains only debug insns, insn would have
740 been NULL in a non-debug compilation, and then we'd end
741 up emitting a DELETED note. For -fcompare-debug
742 stability, emit the note too. */
743 if (insn != BB_END (bb)
744 && DEBUG_INSN_P (next)
745 && DEBUG_INSN_P (BB_END (bb)))
746 {
747 while (next != BB_END (bb) && DEBUG_INSN_P (next))
748 next = NEXT_INSN (next);
749
750 if (next == BB_END (bb))
751 emit_note_after (NOTE_INSN_DELETED, next);
752 }
753 }
754 else
755 insn = get_last_insn ();
756 }
757
758 /* We probably should check the type of the insn so that we do not create
759 an inconsistent cfg. It is checked in verify_flow_info anyway, so do not
760 bother. */
761 if (insn == BB_END (bb))
762 emit_note_after (NOTE_INSN_DELETED, insn);
763
764 /* Create the new basic block. */
765 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
766 BB_COPY_PARTITION (new_bb, bb);
767 BB_END (bb) = insn;
768
769 /* Redirect the outgoing edges. */
770 new_bb->succs = bb->succs;
771 bb->succs = NULL;
772 FOR_EACH_EDGE (e, ei, new_bb->succs)
773 e->src = new_bb;
774
775 /* The new block starts off being dirty. */
776 df_set_bb_dirty (bb);
777 return new_bb;
778 }
779
780 /* Return true if the single edge between blocks A and B is the only place
781 in RTL which holds some unique locus. */
782
783 static bool
784 unique_locus_on_edge_between_p (basic_block a, basic_block b)
785 {
786 const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
787 rtx_insn *insn, *end;
788
789 if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
790 return false;
791
792 /* First scan block A backward. */
793 insn = BB_END (a);
794 end = PREV_INSN (BB_HEAD (a));
795 while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
796 insn = PREV_INSN (insn);
797
798 if (insn != end && INSN_LOCATION (insn) == goto_locus)
799 return false;
800
801 /* Then scan block B forward. */
802 insn = BB_HEAD (b);
803 if (insn)
804 {
805 end = NEXT_INSN (BB_END (b));
806 while (insn != end && !NONDEBUG_INSN_P (insn))
807 insn = NEXT_INSN (insn);
808
809 if (insn != end && INSN_HAS_LOCATION (insn)
810 && INSN_LOCATION (insn) == goto_locus)
811 return false;
812 }
813
814 return true;
815 }
816
817 /* If the single edge between blocks A and B is the only place in RTL which
818 holds some unique locus, emit a nop with that locus between the blocks. */
819
820 static void
821 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
822 {
823 if (!unique_locus_on_edge_between_p (a, b))
824 return;
825
826 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
827 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
828 }
829
830 /* Blocks A and B are to be merged into a single block A. The insns
831 are already contiguous. */
832
833 static void
834 rtl_merge_blocks (basic_block a, basic_block b)
835 {
836 /* If B is a forwarder block whose outgoing edge has no location, we'll
837 propagate the locus of the edge between A and B onto it. */
838 const bool forward_edge_locus
839 = (b->flags & BB_FORWARDER_BLOCK) != 0
840 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
841 rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
842 rtx_insn *del_first = NULL, *del_last = NULL;
843 rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
844 int b_empty = 0;
845
846 if (dump_file)
847 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
848 a->index);
849
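  /* Skip any debug insns at the end of B; afterwards B_END is the last
     real insn of B and B_DEBUG_START is the first of the trailing debug
     insns (if any).  */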
850 while (DEBUG_INSN_P (b_end))
851 b_end = PREV_INSN (b_debug_start = b_end);
852
853 /* If there was a CODE_LABEL beginning B, delete it. */
854 if (LABEL_P (b_head))
855 {
856 /* Detect basic blocks with nothing but a label. This can happen
857 in particular at the end of a function. */
858 if (b_head == b_end)
859 b_empty = 1;
860
861 del_first = del_last = b_head;
862 b_head = NEXT_INSN (b_head);
863 }
864
865 /* Delete the basic block note and handle blocks containing just that
866 note. */
867 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
868 {
869 if (b_head == b_end)
870 b_empty = 1;
871 if (! del_last)
872 del_first = b_head;
873
874 del_last = b_head;
875 b_head = NEXT_INSN (b_head);
876 }
877
878 /* If there was a jump out of A, delete it. */
879 if (JUMP_P (a_end))
880 {
881 rtx_insn *prev;
882
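      /* Walk back over the notes that precede the jump; PREV ends up on the
         insn just before it (or on the basic block note or head), which is
         where a cc0 setter would be.  */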
883 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
884 if (!NOTE_P (prev)
885 || NOTE_INSN_BASIC_BLOCK_P (prev)
886 || prev == BB_HEAD (a))
887 break;
888
889 del_first = a_end;
890
891 /* If this was a conditional jump, we need to also delete
892 the insn that set cc0. */
893 if (HAVE_cc0 && only_sets_cc0_p (prev))
894 {
895 rtx_insn *tmp = prev;
896
897 prev = prev_nonnote_insn (prev);
898 if (!prev)
899 prev = BB_HEAD (a);
900 del_first = tmp;
901 }
902
903 a_end = PREV_INSN (del_first);
904 }
905 else if (BARRIER_P (NEXT_INSN (a_end)))
906 del_first = NEXT_INSN (a_end);
907
908 /* Delete everything marked above as well as crap that might be
909 hanging out between the two blocks. */
910 BB_END (a) = a_end;
911 BB_HEAD (b) = b_empty ? NULL : b_head;
912 delete_insn_chain (del_first, del_last, true);
913
914 /* If not optimizing, preserve the locus of the single edge between
915 blocks A and B if necessary by emitting a nop. */
916 if (!optimize
917 && !forward_edge_locus
918 && !DECL_IGNORED_P (current_function_decl))
919 {
920 emit_nop_for_unique_locus_between (a, b);
921 a_end = BB_END (a);
922 }
923
924 /* Reassociate the insns of B with A. */
925 if (!b_empty)
926 {
927 update_bb_for_insn_chain (a_end, b_debug_end, a);
928
929 BB_END (a) = b_debug_end;
930 BB_HEAD (b) = NULL;
931 }
932 else if (b_end != b_debug_end)
933 {
934 /* Move any deleted labels and other notes between the end of A
935 and the debug insns that make up B after the debug insns,
936 bringing the debug insns into A while keeping the notes after
937 the end of A. */
938 if (NEXT_INSN (a_end) != b_debug_start)
939 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
940 b_debug_end);
941 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
942 BB_END (a) = b_debug_end;
943 }
944
945 df_bb_delete (b->index);
946
947 if (forward_edge_locus)
948 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
949
950 if (dump_file)
951 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
952 }
953
954
955 /* Return true when blocks A and B can be merged. */
956
957 static bool
958 rtl_can_merge_blocks (basic_block a, basic_block b)
959 {
960 /* If we are partitioning hot/cold basic blocks, we don't want to
961 mess up unconditional or indirect jumps that cross between hot
962 and cold sections.
963
964 Basic block partitioning may result in some jumps that appear to
965 be optimizable (or blocks that appear to be mergeable), but which really
966 must be left untouched (they are required to make it safely across
967 partition boundaries). See the comments at the top of
968 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
969
970 if (BB_PARTITION (a) != BB_PARTITION (b))
971 return false;
972
973 /* Protect the loop latches. */
974 if (current_loops && b->loop_father->latch == b)
975 return false;
976
977 /* There must be exactly one edge in between the blocks. */
978 return (single_succ_p (a)
979 && single_succ (a) == b
980 && single_pred_p (b)
981 && a != b
982 /* Must be simple edge. */
983 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
984 && a->next_bb == b
985 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
986 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
987 /* If the jump insn has side effects,
988 we can't kill the edge. */
989 && (!JUMP_P (BB_END (a))
990 || (reload_completed
991 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
992 }
993
994 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
995 exist. */
996
997 rtx_code_label *
998 block_label (basic_block block)
999 {
1000 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
1001 return NULL;
1002
1003 if (!LABEL_P (BB_HEAD (block)))
1004 {
1005 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
1006 }
1007
1008 return as_a <rtx_code_label *> (BB_HEAD (block));
1009 }
1010
1011 /* Remove all barriers from BB_FOOTER of a BB. */
1012
1013 static void
1014 remove_barriers_from_footer (basic_block bb)
1015 {
1016 rtx_insn *insn = BB_FOOTER (bb);
1017
1018 /* Remove barriers but keep jumptables. */
1019 while (insn)
1020 {
1021 if (BARRIER_P (insn))
1022 {
1023 if (PREV_INSN (insn))
1024 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1025 else
1026 BB_FOOTER (bb) = NEXT_INSN (insn);
1027 if (NEXT_INSN (insn))
1028 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1029 }
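      /* A label in the footer marks the start of a jump table; stop here so
         the table is preserved.  */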
1030 if (LABEL_P (insn))
1031 return;
1032 insn = NEXT_INSN (insn);
1033 }
1034 }
1035
1036 /* Attempt to perform edge redirection by replacing a possibly complex jump
1037 instruction with an unconditional jump, or by removing the jump completely.
1038 This can apply only if all edges now point to the same block. The
1039 parameters and return values are equivalent to redirect_edge_and_branch. */
1040
1041 edge
1042 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
1043 {
1044 basic_block src = e->src;
1045 rtx_insn *insn = BB_END (src), *kill_from;
1046 rtx set;
1047 int fallthru = 0;
1048
1049 /* If we are partitioning hot/cold basic blocks, we don't want to
1050 mess up unconditional or indirect jumps that cross between hot
1051 and cold sections.
1052
1053 Basic block partitioning may result in some jumps that appear to
1054 be optimizable (or blocks that appear to be mergeable), but which really
1055 must be left untouched (they are required to make it safely across
1056 partition boundaries). See the comments at the top of
1057 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
1058
1059 if (BB_PARTITION (src) != BB_PARTITION (target))
1060 return NULL;
1061
1062 /* We can replace or remove a complex jump only when we have exactly
1063 two edges. Also, if we have exactly one outgoing edge, we can
1064 redirect that. */
1065 if (EDGE_COUNT (src->succs) >= 3
1066 /* Verify that all targets will be TARGET. Specifically, the
1067 edge that is not E must also go to TARGET. */
1068 || (EDGE_COUNT (src->succs) == 2
1069 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
1070 return NULL;
1071
1072 if (!onlyjump_p (insn))
1073 return NULL;
1074 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
1075 return NULL;
1076
1077 /* Avoid removing branch with side effects. */
1078 set = single_set (insn);
1079 if (!set || side_effects_p (set))
1080 return NULL;
1081
1082 /* In case we zap a conditional jump, we'll need to kill
1083 the cc0 setter too. */
1084 kill_from = insn;
1085 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
1086 && only_sets_cc0_p (PREV_INSN (insn)))
1087 kill_from = PREV_INSN (insn);
1088
1089 /* See if we can create the fallthru edge. */
1090 if (in_cfglayout || can_fallthru (src, target))
1091 {
1092 if (dump_file)
1093 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1094 fallthru = 1;
1095
1096 /* Selectively unlink whole insn chain. */
1097 if (in_cfglayout)
1098 {
1099 delete_insn_chain (kill_from, BB_END (src), false);
1100 remove_barriers_from_footer (src);
1101 }
1102 else
1103 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1104 false);
1105 }
1106
1107 /* If this already is simplejump, redirect it. */
1108 else if (simplejump_p (insn))
1109 {
1110 if (e->dest == target)
1111 return NULL;
1112 if (dump_file)
1113 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1114 INSN_UID (insn), e->dest->index, target->index);
1115 if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1116 block_label (target), 0))
1117 {
1118 gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
1119 return NULL;
1120 }
1121 }
1122
1123 /* Cannot do anything for target exit block. */
1124 else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1125 return NULL;
1126
1127 /* Or replace possibly complicated jump insn by simple jump insn. */
1128 else
1129 {
1130 rtx_code_label *target_label = block_label (target);
1131 rtx_insn *barrier;
1132 rtx_insn *label;
1133 rtx_jump_table_data *table;
1134
1135 emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
1136 JUMP_LABEL (BB_END (src)) = target_label;
1137 LABEL_NUSES (target_label)++;
1138 if (dump_file)
1139 fprintf (dump_file, "Replacing insn %i by jump %i\n",
1140 INSN_UID (insn), INSN_UID (BB_END (src)));
1141
1142
1143 delete_insn_chain (kill_from, insn, false);
1144
1145 /* Recognize a tablejump that we are converting to a
1146 simple jump and remove its associated CODE_LABEL
1147 and ADDR_VEC or ADDR_DIFF_VEC. */
1148 if (tablejump_p (insn, &label, &table))
1149 delete_insn_chain (label, table, false);
1150
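      /* The new unconditional jump must be followed by a barrier; emit one if
         the next real insn is not already a barrier.  */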
1151 barrier = next_nonnote_nondebug_insn (BB_END (src));
1152 if (!barrier || !BARRIER_P (barrier))
1153 emit_barrier_after (BB_END (src));
1154 else
1155 {
1156 if (barrier != NEXT_INSN (BB_END (src)))
1157 {
1158 /* Move the jump before the barrier so that the notes
1159 which originally were or were created before the jump table are
1160 inside the basic block. */
1161 rtx_insn *new_insn = BB_END (src);
1162
1163 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1164 PREV_INSN (barrier), src);
1165
1166 SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1167 SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1168
1169 SET_NEXT_INSN (new_insn) = barrier;
1170 SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1171
1172 SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
1173 SET_PREV_INSN (barrier) = new_insn;
1174 }
1175 }
1176 }
1177
1178 /* Keep only one edge out and set proper flags. */
1179 if (!single_succ_p (src))
1180 remove_edge (e);
1181 gcc_assert (single_succ_p (src));
1182
1183 e = single_succ_edge (src);
1184 if (fallthru)
1185 e->flags = EDGE_FALLTHRU;
1186 else
1187 e->flags = 0;
1188
1189 e->probability = profile_probability::always ();
1190
1191 if (e->dest != target)
1192 redirect_edge_succ (e, target);
1193 return e;
1194 }
1195
1196 /* Subroutine of redirect_branch_edge that tries to patch the jump
1197 instruction INSN so that it reaches block NEW_BB. Do this
1198 only when it originally reached block OLD_LABEL. Return true if this
1199 worked or the original target wasn't OLD_LABEL; return false if
1200 redirection doesn't work. */
1201
1202 static bool
1203 patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
1204 {
1205 rtx_jump_table_data *table;
1206 rtx tmp;
1207 /* Recognize a tablejump and adjust all matching cases. */
1208 if (tablejump_p (insn, NULL, &table))
1209 {
1210 rtvec vec;
1211 int j;
1212 rtx_code_label *new_label = block_label (new_bb);
1213
1214 if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1215 return false;
1216 vec = table->get_labels ();
1217
1218 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1219 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1220 {
1221 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1222 --LABEL_NUSES (old_label);
1223 ++LABEL_NUSES (new_label);
1224 }
1225
1226 /* Handle casesi dispatch insns. */
1227 if ((tmp = tablejump_casesi_pattern (insn)) != NULL_RTX
1228 && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
1229 {
1230 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1231 new_label);
1232 --LABEL_NUSES (old_label);
1233 ++LABEL_NUSES (new_label);
1234 }
1235 }
1236 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1237 {
1238 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1239 rtx note;
1240
1241 if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1242 return false;
1243 rtx_code_label *new_label = block_label (new_bb);
1244
1245 for (i = 0; i < n; ++i)
1246 {
1247 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1248 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1249 if (XEXP (old_ref, 0) == old_label)
1250 {
1251 ASM_OPERANDS_LABEL (tmp, i)
1252 = gen_rtx_LABEL_REF (Pmode, new_label);
1253 --LABEL_NUSES (old_label);
1254 ++LABEL_NUSES (new_label);
1255 }
1256 }
1257
1258 if (JUMP_LABEL (insn) == old_label)
1259 {
1260 JUMP_LABEL (insn) = new_label;
1261 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1262 if (note)
1263 remove_note (insn, note);
1264 }
1265 else
1266 {
1267 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1268 if (note)
1269 remove_note (insn, note);
1270 if (JUMP_LABEL (insn) != new_label
1271 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1272 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1273 }
1274 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1275 != NULL_RTX)
1276 XEXP (note, 0) = new_label;
1277 }
1278 else
1279 {
1280 /* ??? We could play games with moving the named labels from
1281 one basic block to the other in case only one computed_jump is
1282 available. */
1283 if (computed_jump_p (insn)
1284 /* A return instruction can't be redirected. */
1285 || returnjump_p (insn))
1286 return false;
1287
1288 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1289 {
1290 /* If the insn doesn't go where we think, we're confused. */
1291 gcc_assert (JUMP_LABEL (insn) == old_label);
1292
1293 /* If the substitution doesn't succeed, die. This can happen
1294 if the back end emitted unrecognizable instructions or if
1295 target is exit block on some arches. Or for crossing
1296 jumps. */
1297 if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1298 block_label (new_bb), 0))
1299 {
1300 gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
1301 || CROSSING_JUMP_P (insn));
1302 return false;
1303 }
1304 }
1305 }
1306 return true;
1307 }
1308
1309
1310 /* Redirect the edge representing the branch of an (un)conditional jump or
1311 tablejump; return NULL on failure. */
1312 static edge
1313 redirect_branch_edge (edge e, basic_block target)
1314 {
1315 rtx_insn *old_label = BB_HEAD (e->dest);
1316 basic_block src = e->src;
1317 rtx_insn *insn = BB_END (src);
1318
1319 /* We can only redirect non-fallthru edges of jump insn. */
1320 if (e->flags & EDGE_FALLTHRU)
1321 return NULL;
1322 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1323 return NULL;
1324
1325 if (!currently_expanding_to_rtl)
1326 {
1327 if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
1328 return NULL;
1329 }
1330 else
1331 /* When expanding this BB might actually contain multiple
1332 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1333 Redirect all of those that match our label. */
1334 FOR_BB_INSNS (src, insn)
1335 if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
1336 old_label, target))
1337 return NULL;
1338
1339 if (dump_file)
1340 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1341 e->src->index, e->dest->index, target->index);
1342
1343 if (e->dest != target)
1344 e = redirect_edge_succ_nodup (e, target);
1345
1346 return e;
1347 }
1348
1349 /* Called when edge E has been redirected to a new destination,
1350 in order to update the region crossing flag on the edge and
1351 jump. */
1352
1353 static void
1354 fixup_partition_crossing (edge e)
1355 {
1356 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
1357 == EXIT_BLOCK_PTR_FOR_FN (cfun))
1358 return;
1359 /* If we redirected an existing edge, it may already be marked
1360 crossing, even though the new src is missing a reg crossing note.
1361 But make sure reg crossing note doesn't already exist before
1362 inserting. */
1363 if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1364 {
1365 e->flags |= EDGE_CROSSING;
1366 if (JUMP_P (BB_END (e->src)))
1367 CROSSING_JUMP_P (BB_END (e->src)) = 1;
1368 }
1369 else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1370 {
1371 e->flags &= ~EDGE_CROSSING;
1372 /* Remove the section crossing note from jump at end of
1373 src if it exists, and if no other successors are
1374 still crossing. */
1375 if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
1376 {
1377 bool has_crossing_succ = false;
1378 edge e2;
1379 edge_iterator ei;
1380 FOR_EACH_EDGE (e2, ei, e->src->succs)
1381 {
1382 has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1383 if (has_crossing_succ)
1384 break;
1385 }
1386 if (!has_crossing_succ)
1387 CROSSING_JUMP_P (BB_END (e->src)) = 0;
1388 }
1389 }
1390 }
1391
1392 /* Called when block BB has been reassigned to the cold partition,
1393 because it is now dominated by another cold block,
1394 to ensure that the region crossing attributes are updated. */
1395
1396 static void
1397 fixup_new_cold_bb (basic_block bb)
1398 {
1399 edge e;
1400 edge_iterator ei;
1401
1402 /* This is called when a hot bb is found to now be dominated
1403 by a cold bb and therefore needs to become cold. Therefore,
1404 its preds will no longer be region crossing. Any non-dominating
1405 preds that were previously hot would also have become cold
1406 in the caller for the same region. Any preds that were previously
1407 region-crossing will be adjusted in fixup_partition_crossing. */
1408 FOR_EACH_EDGE (e, ei, bb->preds)
1409 {
1410 fixup_partition_crossing (e);
1411 }
1412
1413 /* Possibly need to make bb's successor edges region crossing,
1414 or remove stale region crossing. */
1415 FOR_EACH_EDGE (e, ei, bb->succs)
1416 {
1417 /* We can't have fall-through edges across partition boundaries.
1418 Note that force_nonfallthru will do any necessary partition
1419 boundary fixup by calling fixup_partition_crossing itself. */
1420 if ((e->flags & EDGE_FALLTHRU)
1421 && BB_PARTITION (bb) != BB_PARTITION (e->dest)
1422 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1423 force_nonfallthru (e);
1424 else
1425 fixup_partition_crossing (e);
1426 }
1427 }
1428
1429 /* Attempt to change code to redirect edge E to TARGET. Don't do that at
1430 the expense of adding new instructions or reordering basic blocks.
1431
1432 The function can also be called with the edge destination equal to TARGET.
1433 Then it should try the simplifications and do nothing if none is possible.
1434
1435 Return the edge representing the branch if the transformation succeeded.
1436 Return NULL on failure.
1437 We still return NULL if E already pointed to TARGET and we did not
1438 manage to simplify the instruction stream. */
1439
1440 static edge
1441 rtl_redirect_edge_and_branch (edge e, basic_block target)
1442 {
1443 edge ret;
1444 basic_block src = e->src;
1445 basic_block dest = e->dest;
1446
1447 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1448 return NULL;
1449
1450 if (dest == target)
1451 return e;
1452
1453 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1454 {
1455 df_set_bb_dirty (src);
1456 fixup_partition_crossing (ret);
1457 return ret;
1458 }
1459
1460 ret = redirect_branch_edge (e, target);
1461 if (!ret)
1462 return NULL;
1463
1464 df_set_bb_dirty (src);
1465 fixup_partition_crossing (ret);
1466 return ret;
1467 }
1468
1469 /* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode. */
1470
1471 void
1472 emit_barrier_after_bb (basic_block bb)
1473 {
1474 rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
1475 gcc_assert (current_ir_type () == IR_RTL_CFGRTL
1476 || current_ir_type () == IR_RTL_CFGLAYOUT);
1477 if (current_ir_type () == IR_RTL_CFGLAYOUT)
1478 {
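      /* In cfglayout mode barriers do not live in the insn stream; pull the
         barrier we just emitted back out and append it to BB's footer
         (unless the footer already ends in a barrier).  */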
1479 rtx_insn *insn = unlink_insn_chain (barrier, barrier);
1480
1481 if (BB_FOOTER (bb))
1482 {
1483 rtx_insn *footer_tail = BB_FOOTER (bb);
1484
1485 while (NEXT_INSN (footer_tail))
1486 footer_tail = NEXT_INSN (footer_tail);
1487 if (!BARRIER_P (footer_tail))
1488 {
1489 SET_NEXT_INSN (footer_tail) = insn;
1490 SET_PREV_INSN (insn) = footer_tail;
1491 }
1492 }
1493 else
1494 BB_FOOTER (bb) = insn;
1495 }
1496 }
1497
1498 /* Like force_nonfallthru below, but additionally performs redirection.
1499 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1500 when redirecting to the EXIT_BLOCK: it is either ret_rtx or
1501 simple_return_rtx, indicating which kind of returnjump to create.
1502 It should be NULL otherwise. */
1503
1504 basic_block
1505 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1506 {
1507 basic_block jump_block, new_bb = NULL, src = e->src;
1508 rtx note;
1509 edge new_edge;
1510 int abnormal_edge_flags = 0;
1511 bool asm_goto_edge = false;
1512 int loc;
1513
1514 /* In the case the last instruction is a conditional jump to the next
1515 instruction, first redirect the jump itself and then continue
1516 by creating a basic block afterwards to redirect the fallthru edge. */
1517 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1518 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1519 && any_condjump_p (BB_END (e->src))
1520 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1521 {
1522 rtx note;
1523 edge b = unchecked_make_edge (e->src, target, 0);
1524 bool redirected;
1525
1526 redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
1527 block_label (target), 0);
1528 gcc_assert (redirected);
1529
1530 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1531 if (note)
1532 {
1533 int prob = XINT (note, 0);
1534
1535 b->probability = profile_probability::from_reg_br_prob_note (prob);
1536 e->probability -= e->probability;
1537 }
1538 }
1539
1540 if (e->flags & EDGE_ABNORMAL)
1541 {
1542 /* Irritating special case - fallthru edge to the same block as the abnormal
1543 edge.
1544 We can't redirect the abnormal edge, but we still can split the fallthru
1545 one and create a separate abnormal edge to the original destination.
1546 This allows bb-reorder to make such an edge non-fallthru. */
1547 gcc_assert (e->dest == target);
1548 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1549 e->flags &= EDGE_FALLTHRU;
1550 }
1551 else
1552 {
1553 gcc_assert (e->flags & EDGE_FALLTHRU);
1554 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1555 {
1556 /* We can't redirect the entry block. Create an empty block
1557 at the start of the function which we use to add the new
1558 jump. */
1559 edge tmp;
1560 edge_iterator ei;
1561 bool found = false;
1562
1563 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
1564 ENTRY_BLOCK_PTR_FOR_FN (cfun));
1565 bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
1566
1567 /* Make sure new block ends up in correct hot/cold section. */
1568 BB_COPY_PARTITION (bb, e->dest);
1569
1570 /* Change the existing edge's source to be the new block, and add
1571 a new edge from the entry block to the new block. */
1572 e->src = bb;
1573 for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1574 (tmp = ei_safe_edge (ei)); )
1575 {
1576 if (tmp == e)
1577 {
1578 ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
1579 found = true;
1580 break;
1581 }
1582 else
1583 ei_next (&ei);
1584 }
1585
1586 gcc_assert (found);
1587
1588 vec_safe_push (bb->succs, e);
1589 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
1590 EDGE_FALLTHRU);
1591 }
1592 }
1593
1594 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1595 don't point to the target or fallthru label. */
1596 if (JUMP_P (BB_END (e->src))
1597 && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
1598 && (e->flags & EDGE_FALLTHRU)
1599 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1600 {
1601 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1602 bool adjust_jump_target = false;
1603
1604 for (i = 0; i < n; ++i)
1605 {
1606 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1607 {
1608 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1609 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1610 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1611 adjust_jump_target = true;
1612 }
1613 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1614 asm_goto_edge = true;
1615 }
1616 if (adjust_jump_target)
1617 {
1618 rtx_insn *insn = BB_END (e->src);
1619 rtx note;
1620 rtx_insn *old_label = BB_HEAD (e->dest);
1621 rtx_insn *new_label = BB_HEAD (target);
1622
1623 if (JUMP_LABEL (insn) == old_label)
1624 {
1625 JUMP_LABEL (insn) = new_label;
1626 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1627 if (note)
1628 remove_note (insn, note);
1629 }
1630 else
1631 {
1632 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1633 if (note)
1634 remove_note (insn, note);
1635 if (JUMP_LABEL (insn) != new_label
1636 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1637 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1638 }
1639 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1640 != NULL_RTX)
1641 XEXP (note, 0) = new_label;
1642 }
1643 }
1644
1645 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1646 {
1647 rtx_insn *new_head;
1648 profile_count count = e->count ();
1649 profile_probability probability = e->probability;
1650 /* Create the new structures. */
1651
1652 /* If the old block ended with a tablejump, skip its table
1653 by searching forward from there. Otherwise start searching
1654 forward from the last instruction of the old block. */
1655 rtx_jump_table_data *table;
1656 if (tablejump_p (BB_END (e->src), NULL, &table))
1657 new_head = table;
1658 else
1659 new_head = BB_END (e->src);
1660 new_head = NEXT_INSN (new_head);
1661
1662 jump_block = create_basic_block (new_head, NULL, e->src);
1663 jump_block->count = count;
1664
1665 /* Make sure new block ends up in correct hot/cold section. */
1666
1667 BB_COPY_PARTITION (jump_block, e->src);
1668
1669 /* Wire edge in. */
1670 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1671 new_edge->probability = probability;
1672
1673 /* Redirect old edge. */
1674 redirect_edge_pred (e, jump_block);
1675 e->probability = profile_probability::always ();
1676
1677 /* If e->src was previously region crossing, it no longer is
1678 and the reg crossing note should be removed. */
1679 fixup_partition_crossing (new_edge);
1680
1681 /* If asm goto has any label refs to target's label,
1682 add also edge from asm goto bb to target. */
1683 if (asm_goto_edge)
1684 {
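          /* We cannot tell which of the two targets is more likely, so split
             the count and probability evenly between the fallthru jump block
             and the new edge to TARGET.  */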
1685 new_edge->probability = new_edge->probability.apply_scale (1, 2);
1686 jump_block->count = jump_block->count.apply_scale (1, 2);
1687 edge new_edge2 = make_edge (new_edge->src, target,
1688 e->flags & ~EDGE_FALLTHRU);
1689 new_edge2->probability = probability - new_edge->probability;
1690 }
1691
1692 new_bb = jump_block;
1693 }
1694 else
1695 jump_block = e->src;
1696
1697 loc = e->goto_locus;
1698 e->flags &= ~EDGE_FALLTHRU;
1699 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1700 {
1701 if (jump_label == ret_rtx)
1702 emit_jump_insn_after_setloc (targetm.gen_return (),
1703 BB_END (jump_block), loc);
1704 else
1705 {
1706 gcc_assert (jump_label == simple_return_rtx);
1707 emit_jump_insn_after_setloc (targetm.gen_simple_return (),
1708 BB_END (jump_block), loc);
1709 }
1710 set_return_jump_label (BB_END (jump_block));
1711 }
1712 else
1713 {
1714 rtx_code_label *label = block_label (target);
1715 emit_jump_insn_after_setloc (targetm.gen_jump (label),
1716 BB_END (jump_block), loc);
1717 JUMP_LABEL (BB_END (jump_block)) = label;
1718 LABEL_NUSES (label)++;
1719 }
1720
1721 /* We might be in cfg layout mode, and if so, the following routine will
1722 insert the barrier correctly. */
1723 emit_barrier_after_bb (jump_block);
1724 redirect_edge_succ_nodup (e, target);
1725
1726 if (abnormal_edge_flags)
1727 make_edge (src, target, abnormal_edge_flags);
1728
1729 df_mark_solutions_dirty ();
1730 fixup_partition_crossing (e);
1731 return new_bb;
1732 }
1733
1734 /* Edge E is assumed to be a fallthru edge. Emit the needed jump instruction
1735 (and possibly create a new basic block) to make the edge non-fallthru.
1736 Return the newly created BB or NULL if none. */
1737
1738 static basic_block
1739 rtl_force_nonfallthru (edge e)
1740 {
1741 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1742 }
1743
1744 /* Redirect edge E even at the expense of creating a new jump insn or
1745 basic block. Return the new basic block if created, NULL otherwise.
1746 Conversion must be possible. */
1747
1748 static basic_block
1749 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1750 {
1751 if (redirect_edge_and_branch (e, target)
1752 || e->dest == target)
1753 return NULL;
1754
1755 /* In case the edge redirection failed, try to force it to be non-fallthru
1756 and redirect newly created simplejump. */
1757 df_set_bb_dirty (e->src);
1758 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1759 }
1760
1761 /* The given edge should potentially be a fallthru edge. If that is in
1762 fact true, delete the jump and barriers that are in the way. */
1763
1764 static void
1765 rtl_tidy_fallthru_edge (edge e)
1766 {
1767 rtx_insn *q;
1768 basic_block b = e->src, c = b->next_bb;
1769
1770 /* ??? In a late-running flow pass, other folks may have deleted basic
1771 blocks by nopping out blocks, leaving multiple BARRIERs between here
1772 and the target label. They ought to be chastised and fixed.
1773
1774 We can also wind up with a sequence of undeletable labels between
1775 one block and the next.
1776
1777 So search through a sequence of barriers, labels, and notes for
1778 the head of block C and assert that we really do fall through. */
1779
1780 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1781 if (NONDEBUG_INSN_P (q))
1782 return;
1783
1784 /* Remove what will soon cease being the jump insn from the source block.
1785 If block B consisted only of this single jump, turn it into a deleted
1786 note. */
1787 q = BB_END (b);
1788 if (JUMP_P (q)
1789 && onlyjump_p (q)
1790 && (any_uncondjump_p (q)
1791 || single_succ_p (b)))
1792 {
1793 rtx_insn *label;
1794 rtx_jump_table_data *table;
1795
1796 if (tablejump_p (q, &label, &table))
1797 {
1798 /* The label is likely mentioned in some instruction before
1799 the tablejump and might not be DCEd, so turn it into
1800 a note instead and move it before the tablejump that is going to
1801 be deleted. */
1802 const char *name = LABEL_NAME (label);
1803 PUT_CODE (label, NOTE);
1804 NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
1805 NOTE_DELETED_LABEL_NAME (label) = name;
1806 reorder_insns (label, label, PREV_INSN (q));
1807 delete_insn (table);
1808 }
1809
1810 /* If this was a conditional jump, we need to also delete
1811 the insn that set cc0. */
1812 if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1813 q = PREV_INSN (q);
1814
1815 q = PREV_INSN (q);
1816 }
1817 /* Unconditional jumps with side-effects (i.e. which we can't just delete
1818 together with the barrier) should never have a fallthru edge. */
1819 else if (JUMP_P (q) && any_uncondjump_p (q))
1820 return;
1821
1822 /* Selectively unlink the sequence. */
1823 if (q != PREV_INSN (BB_HEAD (c)))
1824 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1825
1826 e->flags |= EDGE_FALLTHRU;
1827 }
1828
1829 /* Should move basic block BB after basic block AFTER. NIY. */
1830
1831 static bool
1832 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1833 basic_block after ATTRIBUTE_UNUSED)
1834 {
1835 return false;
1836 }
1837
1838 /* Locate the last bb in the same partition as START_BB. */
1839
1840 static basic_block
1841 last_bb_in_partition (basic_block start_bb)
1842 {
1843 basic_block bb;
1844 FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1845 {
1846 if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1847 return bb;
1848 }
1849 /* Return bb before the exit block. */
1850 return bb->prev_bb;
1851 }
1852
1853 /* Split a (typically critical) edge. Return the new block.
1854 The edge must not be abnormal.
1855
1856 ??? The code generally expects to be called on critical edges.
1857 The case of a block ending in an unconditional jump to a
1858 block with multiple predecessors is not handled optimally. */
1859
1860 static basic_block
1861 rtl_split_edge (edge edge_in)
1862 {
1863 basic_block bb, new_bb;
1864 rtx_insn *before;
1865
1866 /* Abnormal edges cannot be split. */
1867 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1868
1869 /* We are going to place the new block in front of the edge destination.
1870 Avoid the existence of fallthru predecessors.  */
1871 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1872 {
1873 edge e = find_fallthru_edge (edge_in->dest->preds);
1874
1875 if (e)
1876 force_nonfallthru (e);
1877 }
1878
1879 /* Create the basic block note. */
1880 if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1881 before = BB_HEAD (edge_in->dest);
1882 else
1883 before = NULL;
1884
1885 /* If this is a fall through edge to the exit block, the blocks might
1886 not be adjacent, and the right place is after the source.  */
1887 if ((edge_in->flags & EDGE_FALLTHRU)
1888 && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1889 {
1890 before = NEXT_INSN (BB_END (edge_in->src));
1891 bb = create_basic_block (before, NULL, edge_in->src);
1892 BB_COPY_PARTITION (bb, edge_in->src);
1893 }
1894 else
1895 {
1896 if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1897 {
1898 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1899 BB_COPY_PARTITION (bb, edge_in->dest);
1900 }
1901 else
1902 {
1903 basic_block after = edge_in->dest->prev_bb;
1904 /* If this is post-bb reordering, and the edge crosses a partition
1905 boundary, the new block needs to be inserted in the bb chain
1906 at the end of the src partition (since we put the new bb into
1907 that partition, see below). Otherwise we may end up creating
1908 an extra partition crossing in the chain, which is illegal.
1909 It can't go after the src, because src may have a fall-through
1910 to a different block. */
1911 if (crtl->bb_reorder_complete
1912 && (edge_in->flags & EDGE_CROSSING))
1913 {
1914 after = last_bb_in_partition (edge_in->src);
1915 before = get_last_bb_insn (after);
1916 /* The instruction following the last bb in partition should
1917 be a barrier, since it cannot end in a fall-through. */
1918 gcc_checking_assert (BARRIER_P (before));
1919 before = NEXT_INSN (before);
1920 }
1921 bb = create_basic_block (before, NULL, after);
1922 /* Put the split bb into the src partition, to avoid creating
1923 a situation where a cold bb dominates a hot bb, in the case
1924 where src is cold and dest is hot. The src will dominate
1925 the new bb (whereas it might not have dominated dest). */
1926 BB_COPY_PARTITION (bb, edge_in->src);
1927 }
1928 }
1929
1930 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1931
1932 /* Can't allow a region crossing edge to be fallthrough. */
1933 if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1934 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1935 {
1936 new_bb = force_nonfallthru (single_succ_edge (bb));
1937 gcc_assert (!new_bb);
1938 }
1939
1940 /* For non-fallthru edges, we must adjust the predecessor's
1941 jump instruction to target our new block. */
1942 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1943 {
1944 edge redirected = redirect_edge_and_branch (edge_in, bb);
1945 gcc_assert (redirected);
1946 }
1947 else
1948 {
1949 if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1950 {
1951 /* For an asm goto, even splitting of a fallthru edge might
1952 need insn patching, as other labels might point to the
1953 old label. */
1954 rtx_insn *last = BB_END (edge_in->src);
1955 if (last
1956 && JUMP_P (last)
1957 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1958 && (extract_asm_operands (PATTERN (last))
1959 || JUMP_LABEL (last) == before)
1960 && patch_jump_insn (last, before, bb))
1961 df_set_bb_dirty (edge_in->src);
1962 }
1963 redirect_edge_succ (edge_in, bb);
1964 }
1965
1966 return bb;
1967 }
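
/* Illustrative sketch, not part of the original file: passes normally do not
   call rtl_split_edge directly but go through the cfghooks wrapper split_edge,
   which dispatches here in RTL mode.  The helper name below is hypothetical;
   split_edge, EDGE_CRITICAL_P and EDGE_ABNORMAL are the real interfaces.  */

static basic_block
example_split_if_critical (edge e)
{
  /* Abnormal edges cannot be split (see the assert in rtl_split_edge).  */
  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
    return split_edge (e);  /* Returns the newly created block.  */
  return NULL;
}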
1968
1969 /* Queue instructions for insertion on an edge between two basic blocks.
1970 The new instructions and basic blocks (if any) will not appear in the
1971 CFG until commit_edge_insertions is called. */
1972
1973 void
1974 insert_insn_on_edge (rtx pattern, edge e)
1975 {
1976 /* We cannot insert instructions on an abnormal critical edge.
1977 It will be easier to find the culprit if we die now. */
1978 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1979
1980 if (e->insns.r == NULL_RTX)
1981 start_sequence ();
1982 else
1983 push_to_sequence (e->insns.r);
1984
1985 emit_insn (pattern);
1986
1987 e->insns.r = get_insns ();
1988 end_sequence ();
1989 }
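
/* Illustrative sketch, not part of the original file: the usual protocol is to
   queue insns on one or more edges and then commit them all at once with
   commit_edge_insertions.  The helper name and the register REG being zeroed
   are hypothetical; insert_insn_on_edge, commit_edge_insertions and
   gen_move_insn are the real interfaces.  */

static void
example_zero_reg_on_succ_edges (basic_block bb, rtx reg)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)))
      /* Nothing is emitted yet; the sequence is parked on the edge.  */
      insert_insn_on_edge (gen_move_insn (reg, const0_rtx), e);

  /* Materialize the queued sequences, splitting edges where needed.  */
  commit_edge_insertions ();
}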
1990
1991 /* Update the CFG for the instructions queued on edge E. */
1992
1993 void
1994 commit_one_edge_insertion (edge e)
1995 {
1996 rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
1997 basic_block bb;
1998
1999 /* Pull the insns off the edge now since the edge might go away. */
2000 insns = e->insns.r;
2001 e->insns.r = NULL;
2002
2003 /* Figure out where to put these insns. If the destination has
2004 one predecessor, insert there. Except for the exit block. */
2005 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2006 {
2007 bb = e->dest;
2008
2009 /* Get the location correct wrt a code label, and "nice" wrt
2010 a basic block note, and before everything else. */
2011 tmp = BB_HEAD (bb);
2012 if (LABEL_P (tmp))
2013 tmp = NEXT_INSN (tmp);
2014 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2015 tmp = NEXT_INSN (tmp);
2016 if (tmp == BB_HEAD (bb))
2017 before = tmp;
2018 else if (tmp)
2019 after = PREV_INSN (tmp);
2020 else
2021 after = get_last_insn ();
2022 }
2023
2024 /* If the source has one successor and the edge is not abnormal,
2025 insert there. Except for the entry block.
2026 Don't do this if the predecessor ends in a jump other than
2027 unconditional simple jump. E.g. for asm goto that points all
2028 its labels at the fallthru basic block, we can't insert instructions
2029 before the asm goto, as the asm goto can have various side effects,
2030 and can't emit instructions after the asm goto, as it must end
2031 the basic block. */
2032 else if ((e->flags & EDGE_ABNORMAL) == 0
2033 && single_succ_p (e->src)
2034 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2035 && (!JUMP_P (BB_END (e->src))
2036 || simplejump_p (BB_END (e->src))))
2037 {
2038 bb = e->src;
2039
2040 /* It is possible to have a non-simple jump here. Consider a target
2041 where some forms of unconditional jumps clobber a register. This
2042 happens on the fr30 for example.
2043
2044 We know this block has a single successor, so we can just emit
2045 the queued insns before the jump. */
2046 if (JUMP_P (BB_END (bb)))
2047 before = BB_END (bb);
2048 else
2049 {
2050 /* We'd better be fallthru, or we've lost track of what's what. */
2051 gcc_assert (e->flags & EDGE_FALLTHRU);
2052
2053 after = BB_END (bb);
2054 }
2055 }
2056
2057 /* Otherwise we must split the edge. */
2058 else
2059 {
2060 bb = split_edge (e);
2061
2062 /* If E crossed a partition boundary, we needed to make bb end in
2063 a region-crossing jump, even though it was originally fallthru. */
2064 if (JUMP_P (BB_END (bb)))
2065 before = BB_END (bb);
2066 else
2067 after = BB_END (bb);
2068 }
2069
2070 /* Now that we've found the spot, do the insertion. */
2071 if (before)
2072 {
2073 emit_insn_before_noloc (insns, before, bb);
2074 last = prev_nonnote_insn (before);
2075 }
2076 else
2077 last = emit_insn_after_noloc (insns, after, bb);
2078
2079 if (returnjump_p (last))
2080 {
2081 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2082 This is not currently a problem because this only happens
2083 for the (single) epilogue, which already has a fallthru edge
2084 to EXIT. */
2085
2086 e = single_succ_edge (bb);
2087 gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2088 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2089
2090 e->flags &= ~EDGE_FALLTHRU;
2091 emit_barrier_after (last);
2092
2093 if (before)
2094 delete_insn (before);
2095 }
2096 else
2097 gcc_assert (!JUMP_P (last));
2098 }
2099
2100 /* Update the CFG for all queued instructions. */
2101
2102 void
2103 commit_edge_insertions (void)
2104 {
2105 basic_block bb;
2106
2107 /* Optimization passes that invoke this routine can cause hot blocks
2108 previously reached by both hot and cold blocks to become dominated only
2109 by cold blocks. This will cause the verification below to fail,
2110 and lead to now cold code in the hot section. In some cases this
2111 may only be visible after newly unreachable blocks are deleted,
2112 which will be done by fixup_partitions. */
2113 fixup_partitions ();
2114
2115 if (!currently_expanding_to_rtl)
2116 checking_verify_flow_info ();
2117
2118 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2119 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2120 {
2121 edge e;
2122 edge_iterator ei;
2123
2124 FOR_EACH_EDGE (e, ei, bb->succs)
2125 if (e->insns.r)
2126 {
2127 if (currently_expanding_to_rtl)
2128 rebuild_jump_labels_chain (e->insns.r);
2129 commit_one_edge_insertion (e);
2130 }
2131 }
2132 }
2133
2134
2135 /* Print out RTL-specific basic block information (live information
2136 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2137 documented in dumpfile.h. */
2138
2139 static void
2140 rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
2141 {
2142 char *s_indent;
2143
2144 s_indent = (char *) alloca ((size_t) indent + 1);
2145 memset (s_indent, ' ', (size_t) indent);
2146 s_indent[indent] = '\0';
2147
2148 if (df && (flags & TDF_DETAILS))
2149 {
2150 df_dump_top (bb, outf);
2151 putc ('\n', outf);
2152 }
2153
2154 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2155 {
2156 rtx_insn *last = BB_END (bb);
2157 if (last)
2158 last = NEXT_INSN (last);
2159 for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
2160 {
2161 if (flags & TDF_DETAILS)
2162 df_dump_insn_top (insn, outf);
2163 if (! (flags & TDF_SLIM))
2164 print_rtl_single (outf, insn);
2165 else
2166 dump_insn_slim (outf, insn);
2167 if (flags & TDF_DETAILS)
2168 df_dump_insn_bottom (insn, outf);
2169 }
2170 }
2171
2172 if (df && (flags & TDF_DETAILS))
2173 {
2174 df_dump_bottom (bb, outf);
2175 putc ('\n', outf);
2176 }
2177
2178 }
2179
2180 /* Like dump_function_to_file, but for RTL. Print out dataflow information
2181 for the start of each basic block. FLAGS are the TDF_* masks documented
2182 in dumpfile.h. */
2183
2184 void
2185 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
2186 {
2187 const rtx_insn *tmp_rtx;
2188 if (rtx_first == 0)
2189 fprintf (outf, "(nil)\n");
2190 else
2191 {
2192 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2193 int max_uid = get_max_uid ();
2194 basic_block *start = XCNEWVEC (basic_block, max_uid);
2195 basic_block *end = XCNEWVEC (basic_block, max_uid);
2196 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2197 basic_block bb;
2198
2199 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2200 insns, but the CFG is not maintained so the basic block info
2201 is not reliable. Therefore it's omitted from the dumps. */
2202 if (! (cfun->curr_properties & PROP_cfg))
2203 flags &= ~TDF_BLOCKS;
2204
2205 if (df)
2206 df_dump_start (outf);
2207
2208 if (cfun->curr_properties & PROP_cfg)
2209 {
2210 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2211 {
2212 rtx_insn *x;
2213
2214 start[INSN_UID (BB_HEAD (bb))] = bb;
2215 end[INSN_UID (BB_END (bb))] = bb;
2216 if (flags & TDF_BLOCKS)
2217 {
2218 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2219 {
2220 enum bb_state state = IN_MULTIPLE_BB;
2221
2222 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2223 state = IN_ONE_BB;
2224 in_bb_p[INSN_UID (x)] = state;
2225
2226 if (x == BB_END (bb))
2227 break;
2228 }
2229 }
2230 }
2231 }
2232
2233 for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
2234 {
2235 if (flags & TDF_BLOCKS)
2236 {
2237 bb = start[INSN_UID (tmp_rtx)];
2238 if (bb != NULL)
2239 {
2240 dump_bb_info (outf, bb, 0, dump_flags, true, false);
2241 if (df && (flags & TDF_DETAILS))
2242 df_dump_top (bb, outf);
2243 }
2244
2245 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2246 && !NOTE_P (tmp_rtx)
2247 && !BARRIER_P (tmp_rtx))
2248 fprintf (outf, ";; Insn is not within a basic block\n");
2249 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2250 fprintf (outf, ";; Insn is in multiple basic blocks\n");
2251 }
2252
2253 if (flags & TDF_DETAILS)
2254 df_dump_insn_top (tmp_rtx, outf);
2255 if (! (flags & TDF_SLIM))
2256 print_rtl_single (outf, tmp_rtx);
2257 else
2258 dump_insn_slim (outf, tmp_rtx);
2259 if (flags & TDF_DETAILS)
2260 df_dump_insn_bottom (tmp_rtx, outf);
2261
2262 bb = end[INSN_UID (tmp_rtx)];
2263 if (bb != NULL)
2264 {
2265 if (flags & TDF_BLOCKS)
2266 {
2267 dump_bb_info (outf, bb, 0, dump_flags, false, true);
2268 if (df && (flags & TDF_DETAILS))
2269 df_dump_bottom (bb, outf);
2270 putc ('\n', outf);
2271 }
2272 /* Emit a hint if the fallthrough target of the current basic block
2273 isn't the one placed right after it.  */
2274 else if (EDGE_COUNT (bb->succs) > 0)
2275 {
2276 gcc_assert (BB_END (bb) == tmp_rtx);
2277 const rtx_insn *ninsn = NEXT_INSN (tmp_rtx);
2278 /* Bypass intervening deleted-insn notes and debug insns. */
2279 while (ninsn
2280 && !NONDEBUG_INSN_P (ninsn)
2281 && !start[INSN_UID (ninsn)])
2282 ninsn = NEXT_INSN (ninsn);
2283 edge e = find_fallthru_edge (bb->succs);
2284 if (e && ninsn)
2285 {
2286 basic_block dest = e->dest;
2287 if (start[INSN_UID (ninsn)] != dest)
2288 fprintf (outf, "%s ; pc falls through to BB %d\n",
2289 print_rtx_head, dest->index);
2290 }
2291 }
2292 }
2293 }
2294
2295 free (start);
2296 free (end);
2297 free (in_bb_p);
2298 }
2299 }
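
/* Illustrative usage note, not part of the original file: when debugging a
   pass it can help to dump the whole insn stream with block boundaries and
   dataflow details, the same way rtl_verify_edges does on an error:

     print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);

   get_insns and the TDF_* flags are the real interfaces; where to place such
   a call is up to the pass being debugged.  */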
2300
2301 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2302
2303 void
2304 update_br_prob_note (basic_block bb)
2305 {
2306 rtx note;
2307 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2308 if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2309 {
2310 if (note)
2311 {
2312 rtx *note_link, this_rtx;
2313
2314 note_link = &REG_NOTES (BB_END (bb));
2315 for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2316 if (this_rtx == note)
2317 {
2318 *note_link = XEXP (this_rtx, 1);
2319 break;
2320 }
2321 }
2322 return;
2323 }
2324 if (!note
2325 || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2326 return;
2327 XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
2328 }
2329
2330 /* Get the last insn associated with block BB (that includes barriers and
2331 tablejumps after BB). */
2332 rtx_insn *
2333 get_last_bb_insn (basic_block bb)
2334 {
2335 rtx_jump_table_data *table;
2336 rtx_insn *tmp;
2337 rtx_insn *end = BB_END (bb);
2338
2339 /* Include any jump table following the basic block. */
2340 if (tablejump_p (end, NULL, &table))
2341 end = table;
2342
2343 /* Include any barriers that may follow the basic block. */
2344 tmp = next_nonnote_nondebug_insn_bb (end);
2345 while (tmp && BARRIER_P (tmp))
2346 {
2347 end = tmp;
2348 tmp = next_nonnote_nondebug_insn_bb (end);
2349 }
2350
2351 return end;
2352 }
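
/* Illustrative sketch, not part of the original file: a deletion helper would
   use get_last_bb_insn so that trailing barriers and jump tables disappear
   together with the block, which is essentially what rtl_delete_block does
   earlier in this file.  The helper name is hypothetical; delete_insn_chain
   is the real routine.  */

static void
example_delete_block_insns (basic_block bb)
{
  rtx_insn *end = get_last_bb_insn (bb);

  /* Remove the block's insns plus any barrier/jump table after it.  */
  delete_insn_chain (BB_HEAD (bb), end, true);
}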
2353
2354 /* Add all BBs reachable from entry via hot paths into the SET. */
2355
2356 void
2357 find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2358 {
2359 auto_vec<basic_block, 64> worklist;
2360
2361 set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2362 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2363
2364 while (worklist.length () > 0)
2365 {
2366 basic_block bb = worklist.pop ();
2367 edge_iterator ei;
2368 edge e;
2369
2370 FOR_EACH_EDGE (e, ei, bb->succs)
2371 if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2372 && !set->add (e->dest))
2373 worklist.safe_push (e->dest);
2374 }
2375 }
2376
2377 /* Sanity check partition hotness to ensure that basic blocks in
2378 the cold partition don't dominate basic blocks in the hot partition.
2379 If FLAG_ONLY is true, report violations as errors. Otherwise
2380 re-mark the dominated blocks as cold, since this is run after
2381 cfg optimizations that may make hot blocks previously reached
2382 by both hot and cold blocks now only reachable along cold paths. */
2383
2384 static vec<basic_block>
2385 find_partition_fixes (bool flag_only)
2386 {
2387 basic_block bb;
2388 vec<basic_block> bbs_to_fix = vNULL;
2389 hash_set<basic_block> set;
2390
2391 /* Callers check this. */
2392 gcc_checking_assert (crtl->has_bb_partition);
2393
2394 find_bbs_reachable_by_hot_paths (&set);
2395
2396 FOR_EACH_BB_FN (bb, cfun)
2397 if (!set.contains (bb)
2398 && BB_PARTITION (bb) != BB_COLD_PARTITION)
2399 {
2400 if (flag_only)
2401 error ("non-cold basic block %d reachable only "
2402 "by paths crossing the cold partition", bb->index);
2403 else
2404 BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2405 bbs_to_fix.safe_push (bb);
2406 }
2407
2408 return bbs_to_fix;
2409 }
2410
2411 /* Perform cleanup on the hot/cold bb partitioning after optimization
2412 passes that modify the cfg. */
2413
2414 void
2415 fixup_partitions (void)
2416 {
2417 if (!crtl->has_bb_partition)
2418 return;
2419
2420 /* Delete any blocks that became unreachable and weren't
2421 already cleaned up, for example during edge forwarding
2422 and convert_jumps_to_returns. This will expose more
2423 opportunities for fixing the partition boundaries here.
2424 Also, the calculation of the dominance graph during verification
2425 will assert if there are unreachable nodes. */
2426 delete_unreachable_blocks ();
2427
2428 /* If there are partitions, do a sanity check on them: A basic block in
2429 a cold partition cannot dominate a basic block in a hot partition.
2430 Fixup any that now violate this requirement, as a result of edge
2431 forwarding and unreachable block deletion. */
2432 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2433
2434 /* Do the partition fixup after all necessary blocks have been converted to
2435 cold, so that we only update the region crossings in the minimum number of
2436 places, which can require forcing edges to be non-fallthru.  */
2437 if (! bbs_to_fix.is_empty ())
2438 {
2439 do
2440 {
2441 basic_block bb = bbs_to_fix.pop ();
2442 fixup_new_cold_bb (bb);
2443 }
2444 while (! bbs_to_fix.is_empty ());
2445
2446 /* Fix up hot cold block grouping if needed. */
2447 if (crtl->bb_reorder_complete && current_ir_type () == IR_RTL_CFGRTL)
2448 {
2449 basic_block bb, first = NULL, second = NULL;
2450 int current_partition = BB_UNPARTITIONED;
2451
2452 FOR_EACH_BB_FN (bb, cfun)
2453 {
2454 if (current_partition != BB_UNPARTITIONED
2455 && BB_PARTITION (bb) != current_partition)
2456 {
2457 if (first == NULL)
2458 first = bb;
2459 else if (second == NULL)
2460 second = bb;
2461 else
2462 {
2463 /* If we switch partitions for the 3rd, 5th etc. time,
2464 move bbs first (inclusive) .. second (exclusive) right
2465 before bb. */
2466 basic_block prev_first = first->prev_bb;
2467 basic_block prev_second = second->prev_bb;
2468 basic_block prev_bb = bb->prev_bb;
2469 prev_first->next_bb = second;
2470 second->prev_bb = prev_first;
2471 prev_second->next_bb = bb;
2472 bb->prev_bb = prev_second;
2473 prev_bb->next_bb = first;
2474 first->prev_bb = prev_bb;
2475 rtx_insn *prev_first_insn = PREV_INSN (BB_HEAD (first));
2476 rtx_insn *prev_second_insn
2477 = PREV_INSN (BB_HEAD (second));
2478 rtx_insn *prev_bb_insn = PREV_INSN (BB_HEAD (bb));
2479 SET_NEXT_INSN (prev_first_insn) = BB_HEAD (second);
2480 SET_PREV_INSN (BB_HEAD (second)) = prev_first_insn;
2481 SET_NEXT_INSN (prev_second_insn) = BB_HEAD (bb);
2482 SET_PREV_INSN (BB_HEAD (bb)) = prev_second_insn;
2483 SET_NEXT_INSN (prev_bb_insn) = BB_HEAD (first);
2484 SET_PREV_INSN (BB_HEAD (first)) = prev_bb_insn;
2485 second = NULL;
2486 }
2487 }
2488 current_partition = BB_PARTITION (bb);
2489 }
2490 gcc_assert (!second);
2491 }
2492 }
2493 }
2494
2495 /* Verify, in the basic block chain, that there is at most one switch
2496 between hot/cold partitions. This condition will not be true until
2497 after reorder_basic_blocks is called. */
2498
2499 static int
2500 verify_hot_cold_block_grouping (void)
2501 {
2502 basic_block bb;
2503 int err = 0;
2504 bool switched_sections = false;
2505 int current_partition = BB_UNPARTITIONED;
2506
2507 /* Even after bb reordering is complete, we go into cfglayout mode
2508 again (in compgoto). Ensure we don't call this before going back
2509 into linearized RTL when any layout fixes would have been committed. */
2510 if (!crtl->bb_reorder_complete
2511 || current_ir_type () != IR_RTL_CFGRTL)
2512 return err;
2513
2514 FOR_EACH_BB_FN (bb, cfun)
2515 {
2516 if (current_partition != BB_UNPARTITIONED
2517 && BB_PARTITION (bb) != current_partition)
2518 {
2519 if (switched_sections)
2520 {
2521 error ("multiple hot/cold transitions found (bb %i)",
2522 bb->index);
2523 err = 1;
2524 }
2525 else
2526 switched_sections = true;
2527
2528 if (!crtl->has_bb_partition)
2529 error ("partition found but function partition flag not set");
2530 }
2531 current_partition = BB_PARTITION (bb);
2532 }
2533
2534 return err;
2535 }
2536
2537
2538 /* Perform several checks on the edges out of each block, such as
2539 the consistency of the branch probabilities, the correctness
2540 of hot/cold partition crossing edges, and the number of expected
2541 successor edges. Also verify that the dominance relationship
2542 between hot/cold blocks is sane. */
2543
2544 static int
2545 rtl_verify_edges (void)
2546 {
2547 int err = 0;
2548 basic_block bb;
2549
2550 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2551 {
2552 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2553 int n_eh = 0, n_abnormal = 0;
2554 edge e, fallthru = NULL;
2555 edge_iterator ei;
2556 rtx note;
2557 bool has_crossing_edge = false;
2558
2559 if (JUMP_P (BB_END (bb))
2560 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2561 && EDGE_COUNT (bb->succs) >= 2
2562 && any_condjump_p (BB_END (bb)))
2563 {
2564 if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2565 {
2566 if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
2567 {
2568 error ("verify_flow_info: "
2569 "REG_BR_PROB is set but cfg probability is not");
2570 err = 1;
2571 }
2572 }
2573 else if (XINT (note, 0)
2574 != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
2575 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2576 {
2577 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2578 XINT (note, 0),
2579 BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
2580 err = 1;
2581 }
2582 }
2583
2584 FOR_EACH_EDGE (e, ei, bb->succs)
2585 {
2586 bool is_crossing;
2587
2588 if (e->flags & EDGE_FALLTHRU)
2589 n_fallthru++, fallthru = e;
2590
2591 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2592 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2593 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2594 has_crossing_edge |= is_crossing;
2595 if (e->flags & EDGE_CROSSING)
2596 {
2597 if (!is_crossing)
2598 {
2599 error ("EDGE_CROSSING incorrectly set across same section");
2600 err = 1;
2601 }
2602 if (e->flags & EDGE_FALLTHRU)
2603 {
2604 error ("fallthru edge crosses section boundary in bb %i",
2605 e->src->index);
2606 err = 1;
2607 }
2608 if (e->flags & EDGE_EH)
2609 {
2610 error ("EH edge crosses section boundary in bb %i",
2611 e->src->index);
2612 err = 1;
2613 }
2614 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2615 {
2616 error ("No region crossing jump at section boundary in bb %i",
2617 bb->index);
2618 err = 1;
2619 }
2620 }
2621 else if (is_crossing)
2622 {
2623 error ("EDGE_CROSSING missing across section boundary");
2624 err = 1;
2625 }
2626
2627 if ((e->flags & ~(EDGE_DFS_BACK
2628 | EDGE_CAN_FALLTHRU
2629 | EDGE_IRREDUCIBLE_LOOP
2630 | EDGE_LOOP_EXIT
2631 | EDGE_CROSSING
2632 | EDGE_PRESERVE)) == 0)
2633 n_branch++;
2634
2635 if (e->flags & EDGE_ABNORMAL_CALL)
2636 n_abnormal_call++;
2637
2638 if (e->flags & EDGE_SIBCALL)
2639 n_sibcall++;
2640
2641 if (e->flags & EDGE_EH)
2642 n_eh++;
2643
2644 if (e->flags & EDGE_ABNORMAL)
2645 n_abnormal++;
2646 }
2647
2648 if (!has_crossing_edge
2649 && JUMP_P (BB_END (bb))
2650 && CROSSING_JUMP_P (BB_END (bb)))
2651 {
2652 print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
2653 error ("Region crossing jump across same section in bb %i",
2654 bb->index);
2655 err = 1;
2656 }
2657
2658 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2659 {
2660 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2661 err = 1;
2662 }
2663 if (n_eh > 1)
2664 {
2665 error ("too many exception handling edges in bb %i", bb->index);
2666 err = 1;
2667 }
2668 if (n_branch
2669 && (!JUMP_P (BB_END (bb))
2670 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2671 || any_condjump_p (BB_END (bb))))))
2672 {
2673 error ("too many outgoing branch edges from bb %i", bb->index);
2674 err = 1;
2675 }
2676 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2677 {
2678 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2679 err = 1;
2680 }
2681 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2682 {
2683 error ("wrong number of branch edges after unconditional jump"
2684 " in bb %i", bb->index);
2685 err = 1;
2686 }
2687 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2688 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2689 {
2690 error ("wrong amount of branch edges after conditional jump"
2691 " in bb %i", bb->index);
2692 err = 1;
2693 }
2694 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2695 {
2696 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2697 err = 1;
2698 }
2699 if (n_sibcall && !CALL_P (BB_END (bb)))
2700 {
2701 error ("sibcall edges for non-call insn in bb %i", bb->index);
2702 err = 1;
2703 }
2704 if (n_abnormal > n_eh
2705 && !(CALL_P (BB_END (bb))
2706 && n_abnormal == n_abnormal_call + n_sibcall)
2707 && (!JUMP_P (BB_END (bb))
2708 || any_condjump_p (BB_END (bb))
2709 || any_uncondjump_p (BB_END (bb))))
2710 {
2711 error ("abnormal edges for no purpose in bb %i", bb->index);
2712 err = 1;
2713 }
2714
2715 int has_eh = -1;
2716 FOR_EACH_EDGE (e, ei, bb->preds)
2717 {
2718 if (has_eh == -1)
2719 has_eh = (e->flags & EDGE_EH);
2720 if ((e->flags & EDGE_EH) == has_eh)
2721 continue;
2722 error ("EH incoming edge mixed with non-EH incoming edges "
2723 "in bb %i", bb->index);
2724 err = 1;
2725 break;
2726 }
2727 }
2728
2729 /* If there are partitions, do a sanity check on them: A basic block in
2730 a cold partition cannot dominate a basic block in a hot partition. */
2731 if (crtl->has_bb_partition && !err
2732 && current_ir_type () == IR_RTL_CFGLAYOUT)
2733 {
2734 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2735 err = !bbs_to_fix.is_empty ();
2736 }
2737
2738 /* Clean up. */
2739 return err;
2740 }
2741
2742 /* Checks on the instructions within blocks. Currently checks that each
2743 block starts with a basic block note, and that basic block notes and
2744 control flow jumps are not found in the middle of the block. */
2745
2746 static int
2747 rtl_verify_bb_insns (void)
2748 {
2749 rtx_insn *x;
2750 int err = 0;
2751 basic_block bb;
2752
2753 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2754 {
2755 /* Now check the header of the basic
2756 block.  It ought to contain an optional CODE_LABEL followed
2757 by a NOTE_BASIC_BLOCK.  */
2758 x = BB_HEAD (bb);
2759 if (LABEL_P (x))
2760 {
2761 if (BB_END (bb) == x)
2762 {
2763 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2764 bb->index);
2765 err = 1;
2766 }
2767
2768 x = NEXT_INSN (x);
2769 }
2770
2771 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2772 {
2773 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2774 bb->index);
2775 err = 1;
2776 }
2777
2778 if (BB_END (bb) == x)
2779 /* Do checks for empty blocks here. */
2780 ;
2781 else
2782 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2783 {
2784 if (NOTE_INSN_BASIC_BLOCK_P (x))
2785 {
2786 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2787 INSN_UID (x), bb->index);
2788 err = 1;
2789 }
2790
2791 if (x == BB_END (bb))
2792 break;
2793
2794 if (control_flow_insn_p (x))
2795 {
2796 error ("in basic block %d:", bb->index);
2797 fatal_insn ("flow control insn inside a basic block", x);
2798 }
2799 }
2800 }
2801
2802 /* Clean up. */
2803 return err;
2804 }
2805
2806 /* Verify that block pointers for instructions in basic blocks, headers and
2807 footers are set appropriately. */
2808
2809 static int
2810 rtl_verify_bb_pointers (void)
2811 {
2812 int err = 0;
2813 basic_block bb;
2814
2815 /* Check the general integrity of the basic blocks. */
2816 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2817 {
2818 rtx_insn *insn;
2819
2820 if (!(bb->flags & BB_RTL))
2821 {
2822 error ("BB_RTL flag not set for block %d", bb->index);
2823 err = 1;
2824 }
2825
2826 FOR_BB_INSNS (bb, insn)
2827 if (BLOCK_FOR_INSN (insn) != bb)
2828 {
2829 error ("insn %d basic block pointer is %d, should be %d",
2830 INSN_UID (insn),
2831 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2832 bb->index);
2833 err = 1;
2834 }
2835
2836 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2837 if (!BARRIER_P (insn)
2838 && BLOCK_FOR_INSN (insn) != NULL)
2839 {
2840 error ("insn %d in header of bb %d has non-NULL basic block",
2841 INSN_UID (insn), bb->index);
2842 err = 1;
2843 }
2844 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2845 if (!BARRIER_P (insn)
2846 && BLOCK_FOR_INSN (insn) != NULL)
2847 {
2848 error ("insn %d in footer of bb %d has non-NULL basic block",
2849 INSN_UID (insn), bb->index);
2850 err = 1;
2851 }
2852 }
2853
2854 /* Clean up. */
2855 return err;
2856 }
2857
2858 /* Verify the CFG and RTL consistency common for both underlying RTL and
2859 cfglayout RTL.
2860
2861 Currently it does the following checks:
2862
2863 - overlapping of basic blocks
2864 - insns with wrong BLOCK_FOR_INSN pointers
2865 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2866 - tails of basic blocks (ensure that boundary is necessary)
2867 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2868 and NOTE_INSN_BASIC_BLOCK
2869 - verify that no fall_thru edge crosses hot/cold partition boundaries
2870 - verify that there are no pending RTL branch predictions
2871 - verify that hot blocks are not dominated by cold blocks
2872
2873 In the future it can be extended to check a lot of other stuff as well
2874 (reachability of basic blocks, life information, etc. etc.). */
2875
2876 static int
2877 rtl_verify_flow_info_1 (void)
2878 {
2879 int err = 0;
2880
2881 err |= rtl_verify_bb_pointers ();
2882
2883 err |= rtl_verify_bb_insns ();
2884
2885 err |= rtl_verify_edges ();
2886
2887 return err;
2888 }
2889
2890 /* Walk the instruction chain and verify that bb head/end pointers
2891 are correct, and that instructions are in exactly one bb and have
2892 correct block pointers. */
2893
2894 static int
2895 rtl_verify_bb_insn_chain (void)
2896 {
2897 basic_block bb;
2898 int err = 0;
2899 rtx_insn *x;
2900 rtx_insn *last_head = get_last_insn ();
2901 basic_block *bb_info;
2902 const int max_uid = get_max_uid ();
2903
2904 bb_info = XCNEWVEC (basic_block, max_uid);
2905
2906 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2907 {
2908 rtx_insn *head = BB_HEAD (bb);
2909 rtx_insn *end = BB_END (bb);
2910
2911 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2912 {
2913 /* Verify the end of the basic block is in the INSN chain. */
2914 if (x == end)
2915 break;
2916
2917 /* And that the code outside of basic blocks has NULL bb field. */
2918 if (!BARRIER_P (x)
2919 && BLOCK_FOR_INSN (x) != NULL)
2920 {
2921 error ("insn %d outside of basic blocks has non-NULL bb field",
2922 INSN_UID (x));
2923 err = 1;
2924 }
2925 }
2926
2927 if (!x)
2928 {
2929 error ("end insn %d for block %d not found in the insn stream",
2930 INSN_UID (end), bb->index);
2931 err = 1;
2932 }
2933
2934 /* Work backwards from the end to the head of the basic block
2935 to verify the head is in the RTL chain. */
2936 for (; x != NULL_RTX; x = PREV_INSN (x))
2937 {
2938 /* While walking over the insn chain, verify insns appear
2939 in only one basic block. */
2940 if (bb_info[INSN_UID (x)] != NULL)
2941 {
2942 error ("insn %d is in multiple basic blocks (%d and %d)",
2943 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2944 err = 1;
2945 }
2946
2947 bb_info[INSN_UID (x)] = bb;
2948
2949 if (x == head)
2950 break;
2951 }
2952 if (!x)
2953 {
2954 error ("head insn %d for block %d not found in the insn stream",
2955 INSN_UID (head), bb->index);
2956 err = 1;
2957 }
2958
2959 last_head = PREV_INSN (x);
2960 }
2961
2962 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2963 {
2964 /* Check that the code before the first basic block has NULL
2965 bb field. */
2966 if (!BARRIER_P (x)
2967 && BLOCK_FOR_INSN (x) != NULL)
2968 {
2969 error ("insn %d outside of basic blocks has non-NULL bb field",
2970 INSN_UID (x));
2971 err = 1;
2972 }
2973 }
2974 free (bb_info);
2975
2976 return err;
2977 }
2978
2979 /* Verify that fallthru edges point to adjacent blocks in layout order and
2980 that barriers exist after non-fallthru blocks. */
2981
2982 static int
2983 rtl_verify_fallthru (void)
2984 {
2985 basic_block bb;
2986 int err = 0;
2987
2988 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2989 {
2990 edge e;
2991
2992 e = find_fallthru_edge (bb->succs);
2993 if (!e)
2994 {
2995 rtx_insn *insn;
2996
2997 /* Ensure existence of barrier in BB with no fallthru edges. */
2998 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2999 {
3000 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
3001 {
3002 error ("missing barrier after block %i", bb->index);
3003 err = 1;
3004 break;
3005 }
3006 if (BARRIER_P (insn))
3007 break;
3008 }
3009 }
3010 else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
3011 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3012 {
3013 rtx_insn *insn;
3014
3015 if (e->src->next_bb != e->dest)
3016 {
3017 error
3018 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
3019 e->src->index, e->dest->index);
3020 err = 1;
3021 }
3022 else
3023 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
3024 insn = NEXT_INSN (insn))
3025 if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
3026 {
3027 error ("verify_flow_info: Incorrect fallthru %i->%i",
3028 e->src->index, e->dest->index);
3029 fatal_insn ("wrong insn in the fallthru edge", insn);
3030 err = 1;
3031 }
3032 }
3033 }
3034
3035 return err;
3036 }
3037
3038 /* Verify that blocks are laid out in consecutive order. While walking the
3039 instructions, verify that all expected instructions are inside the basic
3040 blocks, and that all returns are followed by barriers. */
3041
3042 static int
3043 rtl_verify_bb_layout (void)
3044 {
3045 basic_block bb;
3046 int err = 0;
3047 rtx_insn *x, *y;
3048 int num_bb_notes;
3049 rtx_insn * const rtx_first = get_insns ();
3050 basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
3051
3052 num_bb_notes = 0;
3053
3054 for (x = rtx_first; x; x = NEXT_INSN (x))
3055 {
3056 if (NOTE_INSN_BASIC_BLOCK_P (x))
3057 {
3058 bb = NOTE_BASIC_BLOCK (x);
3059
3060 num_bb_notes++;
3061 if (bb != last_bb_seen->next_bb)
3062 internal_error ("basic blocks not laid down consecutively");
3063
3064 curr_bb = last_bb_seen = bb;
3065 }
3066
3067 if (!curr_bb)
3068 {
3069 switch (GET_CODE (x))
3070 {
3071 case BARRIER:
3072 case NOTE:
3073 break;
3074
3075 case CODE_LABEL:
3076 /* An ADDR_VEC is placed outside any basic block. */
3077 if (NEXT_INSN (x)
3078 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
3079 x = NEXT_INSN (x);
3080
3081 /* But in any case, non-deletable labels can appear anywhere. */
3082 break;
3083
3084 default:
3085 fatal_insn ("insn outside basic block", x);
3086 }
3087 }
3088
3089 if (JUMP_P (x)
3090 && returnjump_p (x) && ! condjump_p (x)
3091 && ! ((y = next_nonnote_nondebug_insn (x))
3092 && BARRIER_P (y)))
3093 fatal_insn ("return not followed by barrier", x);
3094
3095 if (curr_bb && x == BB_END (curr_bb))
3096 curr_bb = NULL;
3097 }
3098
3099 if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
3100 internal_error
3101 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
3102 num_bb_notes, n_basic_blocks_for_fn (cfun));
3103
3104 return err;
3105 }
3106
3107 /* Verify the CFG and RTL consistency common for both underlying RTL and
3108 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3109
3110 Currently it does the following checks:
3111 - all checks of rtl_verify_flow_info_1
3112 - test head/end pointers
3113 - check that blocks are laid out in consecutive order
3114 - check that all insns are in the basic blocks
3115 (except the switch handling code, barriers and notes)
3116 - check that all returns are followed by barriers
3117 - check that all fallthru edges point to the adjacent blocks
3118 - verify that there is a single hot/cold partition boundary after bbro */
3119
3120 static int
3121 rtl_verify_flow_info (void)
3122 {
3123 int err = 0;
3124
3125 err |= rtl_verify_flow_info_1 ();
3126
3127 err |= rtl_verify_bb_insn_chain ();
3128
3129 err |= rtl_verify_fallthru ();
3130
3131 err |= rtl_verify_bb_layout ();
3132
3133 err |= verify_hot_cold_block_grouping ();
3134
3135 return err;
3136 }
3137
3138 /* Assume that the preceding pass has possibly eliminated jump instructions
3139 or converted the unconditional jumps.  Eliminate the edges from the CFG.
3140 Return true if any edges are eliminated. */
3141
3142 bool
3143 purge_dead_edges (basic_block bb)
3144 {
3145 edge e;
3146 rtx_insn *insn = BB_END (bb);
3147 rtx note;
3148 bool purged = false;
3149 bool found;
3150 edge_iterator ei;
3151
3152 if ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb))
3153 do
3154 insn = PREV_INSN (insn);
3155 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3156
3157 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
3158 if (NONJUMP_INSN_P (insn)
3159 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3160 {
3161 rtx eqnote;
3162
3163 if (! may_trap_p (PATTERN (insn))
3164 || ((eqnote = find_reg_equal_equiv_note (insn))
3165 && ! may_trap_p (XEXP (eqnote, 0))))
3166 remove_note (insn, note);
3167 }
3168
3169 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
3170 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3171 {
3172 bool remove = false;
3173
3174 /* There are three types of edges we need to handle correctly here: EH
3175 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3176 latter can appear when nonlocal gotos are used. */
3177 if (e->flags & EDGE_ABNORMAL_CALL)
3178 {
3179 if (!CALL_P (insn))
3180 remove = true;
3181 else if (can_nonlocal_goto (insn))
3182 ;
3183 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3184 ;
3185 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3186 ;
3187 else
3188 remove = true;
3189 }
3190 else if (e->flags & EDGE_EH)
3191 remove = !can_throw_internal (insn);
3192
3193 if (remove)
3194 {
3195 remove_edge (e);
3196 df_set_bb_dirty (bb);
3197 purged = true;
3198 }
3199 else
3200 ei_next (&ei);
3201 }
3202
3203 if (JUMP_P (insn))
3204 {
3205 rtx note;
3206 edge b,f;
3207 edge_iterator ei;
3208
3209 /* We do care only about conditional jumps and simplejumps. */
3210 if (!any_condjump_p (insn)
3211 && !returnjump_p (insn)
3212 && !simplejump_p (insn))
3213 return purged;
3214
3215 /* Branch probability/prediction notes are defined only for
3216 condjumps. We've possibly turned condjump into simplejump. */
3217 if (simplejump_p (insn))
3218 {
3219 note = find_reg_note (insn, REG_BR_PROB, NULL);
3220 if (note)
3221 remove_note (insn, note);
3222 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3223 remove_note (insn, note);
3224 }
3225
3226 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3227 {
3228 /* Avoid letting abnormal flags leak from computed jumps turned
3229 into simplejumps. */
3230
3231 e->flags &= ~EDGE_ABNORMAL;
3232
3233 /* See if this edge is one we should keep. */
3234 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3235 /* A conditional jump can fall through into the next
3236 block, so we should keep the edge. */
3237 {
3238 ei_next (&ei);
3239 continue;
3240 }
3241 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3242 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3243 /* If the destination block is the target of the jump,
3244 keep the edge. */
3245 {
3246 ei_next (&ei);
3247 continue;
3248 }
3249 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3250 && returnjump_p (insn))
3251 /* If the destination block is the exit block, and this
3252 instruction is a return, then keep the edge. */
3253 {
3254 ei_next (&ei);
3255 continue;
3256 }
3257 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3258 /* Keep the edges that correspond to exceptions thrown by
3259 this instruction and rematerialize the EDGE_ABNORMAL
3260 flag we just cleared above. */
3261 {
3262 e->flags |= EDGE_ABNORMAL;
3263 ei_next (&ei);
3264 continue;
3265 }
3266
3267 /* We do not need this edge. */
3268 df_set_bb_dirty (bb);
3269 purged = true;
3270 remove_edge (e);
3271 }
3272
3273 if (EDGE_COUNT (bb->succs) == 0 || !purged)
3274 return purged;
3275
3276 if (dump_file)
3277 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3278
3279 if (!optimize)
3280 return purged;
3281
3282 /* Redistribute probabilities. */
3283 if (single_succ_p (bb))
3284 {
3285 single_succ_edge (bb)->probability = profile_probability::always ();
3286 }
3287 else
3288 {
3289 note = find_reg_note (insn, REG_BR_PROB, NULL);
3290 if (!note)
3291 return purged;
3292
3293 b = BRANCH_EDGE (bb);
3294 f = FALLTHRU_EDGE (bb);
3295 b->probability = profile_probability::from_reg_br_prob_note
3296 (XINT (note, 0));
3297 f->probability = b->probability.invert ();
3298 }
3299
3300 return purged;
3301 }
3302 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3303 {
3304 /* First, there should not be any EH or ABCALL edges resulting
3305 from non-local gotos and the like. If there were, we shouldn't
3306 have created the sibcall in the first place. Second, there
3307 should of course never have been a fallthru edge. */
3308 gcc_assert (single_succ_p (bb));
3309 gcc_assert (single_succ_edge (bb)->flags
3310 == (EDGE_SIBCALL | EDGE_ABNORMAL));
3311
3312 return 0;
3313 }
3314
3315 /* If we don't see a jump insn, we don't know exactly why the block would
3316 have been broken at this point. Look for a simple, non-fallthru edge,
3317 as these are only created by conditional branches. If we find such an
3318 edge we know that there used to be a jump here and can then safely
3319 remove all non-fallthru edges. */
3320 found = false;
3321 FOR_EACH_EDGE (e, ei, bb->succs)
3322 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3323 {
3324 found = true;
3325 break;
3326 }
3327
3328 if (!found)
3329 return purged;
3330
3331 /* Remove all but the fake and fallthru edges. The fake edge may be
3332 the only successor for this block in the case of noreturn
3333 calls. */
3334 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3335 {
3336 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3337 {
3338 df_set_bb_dirty (bb);
3339 remove_edge (e);
3340 purged = true;
3341 }
3342 else
3343 ei_next (&ei);
3344 }
3345
3346 gcc_assert (single_succ_p (bb));
3347
3348 single_succ_edge (bb)->probability = profile_probability::always ();
3349
3350 if (dump_file)
3351 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3352 bb->index);
3353 return purged;
3354 }
3355
3356 /* Search all basic blocks for potentially dead edges and purge them. Return
3357 true if some edge has been eliminated. */
3358
3359 bool
3360 purge_all_dead_edges (void)
3361 {
3362 int purged = false;
3363 basic_block bb;
3364
3365 FOR_EACH_BB_FN (bb, cfun)
3366 {
3367 bool purged_here = purge_dead_edges (bb);
3368
3369 purged |= purged_here;
3370 }
3371
3372 return purged;
3373 }
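
/* Illustrative usage note, not part of the original file: a pass that has
   just simplified or deleted jump insns would typically call this before the
   CFG is verified or cleaned, e.g.

     if (purge_all_dead_edges ())
       cleanup_cfg (0);

   cleanup_cfg is a real routine from cfgcleanup.c; whether it is needed after
   the purge depends on the caller.  */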
3374
3375 /* This is used by a few passes that emit some instructions after abnormal
3376 calls, moving the basic block's end, while they in fact do want to emit
3377 them on the fallthru edge.  Look for abnormal call edges, search backward
3378 for the call in the block and insert the instructions on the edge instead.
3379
3380 Similarly, handle instructions throwing exceptions internally.
3381
3382 Return true when instructions have been found and inserted on edges. */
3383
3384 bool
3385 fixup_abnormal_edges (void)
3386 {
3387 bool inserted = false;
3388 basic_block bb;
3389
3390 FOR_EACH_BB_FN (bb, cfun)
3391 {
3392 edge e;
3393 edge_iterator ei;
3394
3395 /* Look for cases we are interested in - calls or instructions causing
3396 exceptions. */
3397 FOR_EACH_EDGE (e, ei, bb->succs)
3398 if ((e->flags & EDGE_ABNORMAL_CALL)
3399 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3400 == (EDGE_ABNORMAL | EDGE_EH)))
3401 break;
3402
3403 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3404 {
3405 rtx_insn *insn;
3406
3407 /* Get past the new insns generated. Allow notes, as the insns
3408 may be already deleted. */
3409 insn = BB_END (bb);
3410 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3411 && !can_throw_internal (insn)
3412 && insn != BB_HEAD (bb))
3413 insn = PREV_INSN (insn);
3414
3415 if (CALL_P (insn) || can_throw_internal (insn))
3416 {
3417 rtx_insn *stop, *next;
3418
3419 e = find_fallthru_edge (bb->succs);
3420
3421 stop = NEXT_INSN (BB_END (bb));
3422 BB_END (bb) = insn;
3423
3424 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3425 {
3426 next = NEXT_INSN (insn);
3427 if (INSN_P (insn))
3428 {
3429 delete_insn (insn);
3430
3431 /* Sometimes there's still the return value USE.
3432 If it's placed after a trapping call (i.e. that
3433 call is the last insn anyway), we have no fallthru
3434 edge. Simply delete this use and don't try to insert
3435 on the non-existent edge.
3436 Similarly, sometimes a call that can throw is
3437 followed in the source with __builtin_unreachable (),
3438 meaning that there is UB if the call returns rather
3439 than throws. If there weren't any instructions
3440 following such calls before, supposedly even the ones
3441 we've deleted aren't significant and can be
3442 removed. */
3443 if (e)
3444 {
3445 /* We're not deleting it, we're moving it. */
3446 insn->set_undeleted ();
3447 SET_PREV_INSN (insn) = NULL_RTX;
3448 SET_NEXT_INSN (insn) = NULL_RTX;
3449
3450 insert_insn_on_edge (insn, e);
3451 inserted = true;
3452 }
3453 }
3454 else if (!BARRIER_P (insn))
3455 set_block_for_insn (insn, NULL);
3456 }
3457 }
3458
3459 /* It may be that we don't find any trapping insn. In this
3460 case we discovered quite late that the insn that had been
3461 marked as can_throw_internal in fact couldn't trap at all.
3462 So we should in fact delete the EH edges out of the block. */
3463 else
3464 purge_dead_edges (bb);
3465 }
3466 }
3467
3468 return inserted;
3469 }
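
/* Illustrative usage note, not part of the original file: because the moved
   insns are only queued on the fallthru edges via insert_insn_on_edge, a
   caller that sees a true return value still has to materialize them,
   roughly:

     if (fixup_abnormal_edges ())
       commit_edge_insertions ();

   The surrounding pass is left unspecified here.  */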
3470
3471 /* Cut the insns from FIRST to LAST out of the insns stream. */
3472
3473 rtx_insn *
3474 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3475 {
3476 rtx_insn *prevfirst = PREV_INSN (first);
3477 rtx_insn *nextlast = NEXT_INSN (last);
3478
3479 SET_PREV_INSN (first) = NULL;
3480 SET_NEXT_INSN (last) = NULL;
3481 if (prevfirst)
3482 SET_NEXT_INSN (prevfirst) = nextlast;
3483 if (nextlast)
3484 SET_PREV_INSN (nextlast) = prevfirst;
3485 else
3486 set_last_insn (prevfirst);
3487 if (!prevfirst)
3488 set_first_insn (nextlast);
3489 return first;
3490 }
3491
3492 /* Skip over inter-block insns occurring after BB which are typically
3493 associated with BB (e.g., barriers). If there are any such insns,
3494 we return the last one. Otherwise, we return the end of BB. */
3495
3496 static rtx_insn *
3497 skip_insns_after_block (basic_block bb)
3498 {
3499 rtx_insn *insn, *last_insn, *next_head, *prev;
3500
3501 next_head = NULL;
3502 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3503 next_head = BB_HEAD (bb->next_bb);
3504
3505 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3506 {
3507 if (insn == next_head)
3508 break;
3509
3510 switch (GET_CODE (insn))
3511 {
3512 case BARRIER:
3513 last_insn = insn;
3514 continue;
3515
3516 case NOTE:
3517 switch (NOTE_KIND (insn))
3518 {
3519 case NOTE_INSN_BLOCK_END:
3520 gcc_unreachable ();
3521 continue;
3522 default:
3523 continue;
3524 break;
3525 }
3526 break;
3527
3528 case CODE_LABEL:
3529 if (NEXT_INSN (insn)
3530 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3531 {
3532 insn = NEXT_INSN (insn);
3533 last_insn = insn;
3534 continue;
3535 }
3536 break;
3537
3538 default:
3539 break;
3540 }
3541
3542 break;
3543 }
3544
3545 /* It is possible to hit a contradictory sequence.  For instance:
3546
3547 jump_insn
3548 NOTE_INSN_BLOCK_BEG
3549 barrier
3550
3551 where the barrier belongs to the jump_insn, but the note does not.  This can be
3552 created by removing the basic block originally following
3553 NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */
3554
3555 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3556 {
3557 prev = PREV_INSN (insn);
3558 if (NOTE_P (insn))
3559 switch (NOTE_KIND (insn))
3560 {
3561 case NOTE_INSN_BLOCK_END:
3562 gcc_unreachable ();
3563 break;
3564 case NOTE_INSN_DELETED:
3565 case NOTE_INSN_DELETED_LABEL:
3566 case NOTE_INSN_DELETED_DEBUG_LABEL:
3567 continue;
3568 default:
3569 reorder_insns (insn, insn, last_insn);
3570 }
3571 }
3572
3573 return last_insn;
3574 }
3575
3576 /* Locate or create a label for a given basic block. */
3577
3578 static rtx_insn *
3579 label_for_bb (basic_block bb)
3580 {
3581 rtx_insn *label = BB_HEAD (bb);
3582
3583 if (!LABEL_P (label))
3584 {
3585 if (dump_file)
3586 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3587
3588 label = block_label (bb);
3589 }
3590
3591 return label;
3592 }
3593
3594 /* Locate the effective beginning and end of the insn chain for each
3595 block, as defined by skip_insns_after_block above. */
3596
3597 static void
3598 record_effective_endpoints (void)
3599 {
3600 rtx_insn *next_insn;
3601 basic_block bb;
3602 rtx_insn *insn;
3603
3604 for (insn = get_insns ();
3605 insn
3606 && NOTE_P (insn)
3607 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3608 insn = NEXT_INSN (insn))
3609 continue;
3610 /* No basic blocks at all? */
3611 gcc_assert (insn);
3612
3613 if (PREV_INSN (insn))
3614 cfg_layout_function_header =
3615 unlink_insn_chain (get_insns (), PREV_INSN (insn));
3616 else
3617 cfg_layout_function_header = NULL;
3618
3619 next_insn = get_insns ();
3620 FOR_EACH_BB_FN (bb, cfun)
3621 {
3622 rtx_insn *end;
3623
3624 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3625 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3626 PREV_INSN (BB_HEAD (bb)));
3627 end = skip_insns_after_block (bb);
3628 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3629 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3630 next_insn = NEXT_INSN (BB_END (bb));
3631 }
3632
3633 cfg_layout_function_footer = next_insn;
3634 if (cfg_layout_function_footer)
3635 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3636 }
3637
3638 namespace {
3639
3640 const pass_data pass_data_into_cfg_layout_mode =
3641 {
3642 RTL_PASS, /* type */
3643 "into_cfglayout", /* name */
3644 OPTGROUP_NONE, /* optinfo_flags */
3645 TV_CFG, /* tv_id */
3646 0, /* properties_required */
3647 PROP_cfglayout, /* properties_provided */
3648 0, /* properties_destroyed */
3649 0, /* todo_flags_start */
3650 0, /* todo_flags_finish */
3651 };
3652
3653 class pass_into_cfg_layout_mode : public rtl_opt_pass
3654 {
3655 public:
3656 pass_into_cfg_layout_mode (gcc::context *ctxt)
3657 : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3658 {}
3659
3660 /* opt_pass methods: */
3661 virtual unsigned int execute (function *)
3662 {
3663 cfg_layout_initialize (0);
3664 return 0;
3665 }
3666
3667 }; // class pass_into_cfg_layout_mode
3668
3669 } // anon namespace
3670
3671 rtl_opt_pass *
3672 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3673 {
3674 return new pass_into_cfg_layout_mode (ctxt);
3675 }
3676
3677 namespace {
3678
3679 const pass_data pass_data_outof_cfg_layout_mode =
3680 {
3681 RTL_PASS, /* type */
3682 "outof_cfglayout", /* name */
3683 OPTGROUP_NONE, /* optinfo_flags */
3684 TV_CFG, /* tv_id */
3685 0, /* properties_required */
3686 0, /* properties_provided */
3687 PROP_cfglayout, /* properties_destroyed */
3688 0, /* todo_flags_start */
3689 0, /* todo_flags_finish */
3690 };
3691
3692 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3693 {
3694 public:
3695 pass_outof_cfg_layout_mode (gcc::context *ctxt)
3696 : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3697 {}
3698
3699 /* opt_pass methods: */
3700 virtual unsigned int execute (function *);
3701
3702 }; // class pass_outof_cfg_layout_mode
3703
3704 unsigned int
3705 pass_outof_cfg_layout_mode::execute (function *fun)
3706 {
3707 basic_block bb;
3708
3709 FOR_EACH_BB_FN (bb, fun)
3710 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3711 bb->aux = bb->next_bb;
3712
3713 cfg_layout_finalize ();
3714
3715 return 0;
3716 }
3717
3718 } // anon namespace
3719
3720 rtl_opt_pass *
3721 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3722 {
3723 return new pass_outof_cfg_layout_mode (ctxt);
3724 }
3725
3726
3727 /* Link the basic blocks in the correct order, compacting the basic
3728 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3729 function also clears the basic block header and footer fields.
3730
3731 This function is usually called after a pass (e.g. tracer) finishes
3732 some transformations while in cfglayout mode. The required sequence
3733 of the basic blocks is in a linked list along the bb->aux field.
3734 This function re-links the basic block prev_bb and next_bb pointers
3735 accordingly, and it compacts and renumbers the blocks.
3736
3737 FIXME: This currently works only for RTL, but the only RTL-specific
3738 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3739 to GIMPLE a long time ago, but it doesn't relink the basic block
3740 chain. It could do that (to give better initial RTL) if this function
3741 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
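/* A minimal usage sketch (mirroring pass_outof_cfg_layout_mode::execute):
   a pass records the desired order in the bb->aux chain and then calls this
   function, e.g.

     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
         bb->aux = bb->next_bb;
     relink_block_chain (true);

   Passing true keeps the function in cfglayout mode and preserves the
   per-block header/footer chains.  */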
3742
3743 void
3744 relink_block_chain (bool stay_in_cfglayout_mode)
3745 {
3746 basic_block bb, prev_bb;
3747 int index;
3748
3749 /* Maybe dump the re-ordered sequence. */
3750 if (dump_file)
3751 {
3752 fprintf (dump_file, "Reordered sequence:\n");
3753 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3754 NUM_FIXED_BLOCKS;
3755 bb;
3756 bb = (basic_block) bb->aux, index++)
3757 {
3758 fprintf (dump_file, " %i ", index);
3759 if (get_bb_original (bb))
3760 fprintf (dump_file, "duplicate of %i\n",
3761 get_bb_original (bb)->index);
3762 else if (forwarder_block_p (bb)
3763 && !LABEL_P (BB_HEAD (bb)))
3764 fprintf (dump_file, "compensation\n");
3765 else
3766 fprintf (dump_file, "bb %i\n", bb->index);
3767 }
3768 }
3769
3770 /* Now reorder the blocks. */
3771 prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3772 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3773 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3774 {
3775 bb->prev_bb = prev_bb;
3776 prev_bb->next_bb = bb;
3777 }
3778 prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3779 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3780
3781 /* Then, clean up the aux fields. */
3782 FOR_ALL_BB_FN (bb, cfun)
3783 {
3784 bb->aux = NULL;
3785 if (!stay_in_cfglayout_mode)
3786 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3787 }
3788
3789 /* Maybe reset the original copy tables, they are not valid anymore
3790 when we renumber the basic blocks in compact_blocks. If we are
3791 going out of cfglayout mode, don't re-allocate the tables.
3792 if (original_copy_tables_initialized_p ())
3793 free_original_copy_tables ();
3794 if (stay_in_cfglayout_mode)
3795 initialize_original_copy_tables ();
3796
3797 /* Finally, put basic_block_info in the new order. */
3798 compact_blocks ();
3799 }
3800
3801
3802 /* Given a reorder chain, rearrange the code to match. */
3803
3804 static void
3805 fixup_reorder_chain (void)
3806 {
3807 basic_block bb;
3808 rtx_insn *insn = NULL;
3809
3810 if (cfg_layout_function_header)
3811 {
3812 set_first_insn (cfg_layout_function_header);
3813 insn = cfg_layout_function_header;
3814 while (NEXT_INSN (insn))
3815 insn = NEXT_INSN (insn);
3816 }
3817
3818 /* First do the bulk reordering -- rechain the blocks without regard to
3819 the needed changes to jumps and labels. */
3820
3821 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3822 bb->aux)
3823 {
3824 if (BB_HEADER (bb))
3825 {
3826 if (insn)
3827 SET_NEXT_INSN (insn) = BB_HEADER (bb);
3828 else
3829 set_first_insn (BB_HEADER (bb));
3830 SET_PREV_INSN (BB_HEADER (bb)) = insn;
3831 insn = BB_HEADER (bb);
3832 while (NEXT_INSN (insn))
3833 insn = NEXT_INSN (insn);
3834 }
3835 if (insn)
3836 SET_NEXT_INSN (insn) = BB_HEAD (bb);
3837 else
3838 set_first_insn (BB_HEAD (bb));
3839 SET_PREV_INSN (BB_HEAD (bb)) = insn;
3840 insn = BB_END (bb);
3841 if (BB_FOOTER (bb))
3842 {
3843 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3844 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3845 while (NEXT_INSN (insn))
3846 insn = NEXT_INSN (insn);
3847 }
3848 }
3849
3850 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3851 if (cfg_layout_function_footer)
3852 SET_PREV_INSN (cfg_layout_function_footer) = insn;
3853
3854 while (NEXT_INSN (insn))
3855 insn = NEXT_INSN (insn);
3856
3857 set_last_insn (insn);
3858 if (flag_checking)
3859 verify_insn_chain ();
3860
3861 /* Now add jumps and labels as needed to match the blocks' new
3862 outgoing edges. */
3863
3864 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3865 bb->aux)
3866 {
3867 edge e_fall, e_taken, e;
3868 rtx_insn *bb_end_insn;
3869 rtx ret_label = NULL_RTX;
3870 basic_block nb;
3871 edge_iterator ei;
3872
3873 if (EDGE_COUNT (bb->succs) == 0)
3874 continue;
3875
3876 /* Find the old fallthru edge, and another non-EH edge for
3877 a taken jump. */
3878 e_taken = e_fall = NULL;
3879
3880 FOR_EACH_EDGE (e, ei, bb->succs)
3881 if (e->flags & EDGE_FALLTHRU)
3882 e_fall = e;
3883 else if (! (e->flags & EDGE_EH))
3884 e_taken = e;
3885
3886 bb_end_insn = BB_END (bb);
3887 if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
3888 {
3889 ret_label = JUMP_LABEL (bb_end_jump);
3890 if (any_condjump_p (bb_end_jump))
3891 {
3892 /* This might happen if the conditional jump has side
3893 effects and could therefore not be optimized away.
3894 Make the basic block end with a barrier in order
3895 to prevent rtl_verify_flow_info from complaining. */
3896 if (!e_fall)
3897 {
3898 gcc_assert (!onlyjump_p (bb_end_jump)
3899 || returnjump_p (bb_end_jump)
3900 || (e_taken->flags & EDGE_CROSSING));
3901 emit_barrier_after (bb_end_jump);
3902 continue;
3903 }
3904
3905 /* If the old fallthru is still next, nothing to do. */
3906 if (bb->aux == e_fall->dest
3907 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3908 continue;
3909
3910 /* The degenerate case of a conditional jump jumping to the next
3911 instruction can happen for jumps with side effects. We need
3912 to construct a forwarder block and this will be done just
3913 fine by force_nonfallthru below. */
3914 if (!e_taken)
3915 ;
3916
3917 /* There is another special case: if *neither* block is next,
3918 such as happens at the very end of a function, then we'll
3919 need to add a new unconditional jump. Choose the taken
3920 edge based on known or assumed probability. */
3921 else if (bb->aux != e_taken->dest)
3922 {
3923 rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
3924
3925 if (note
3926 && profile_probability::from_reg_br_prob_note
3927 (XINT (note, 0)) < profile_probability::even ()
3928 && invert_jump (bb_end_jump,
3929 (e_fall->dest
3930 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3931 ? NULL_RTX
3932 : label_for_bb (e_fall->dest)), 0))
3933 {
3934 e_fall->flags &= ~EDGE_FALLTHRU;
3935 gcc_checking_assert (could_fall_through
3936 (e_taken->src, e_taken->dest));
3937 e_taken->flags |= EDGE_FALLTHRU;
3938 update_br_prob_note (bb);
3939 e = e_fall, e_fall = e_taken, e_taken = e;
3940 }
3941 }
3942
3943 /* If the "jumping" edge is a crossing edge, and the fall
3944 through edge is non-crossing, leave things as they are. */
3945 else if ((e_taken->flags & EDGE_CROSSING)
3946 && !(e_fall->flags & EDGE_CROSSING))
3947 continue;
3948
3949 /* Otherwise we can try to invert the jump. This will
3950 basically never fail, however, keep up the pretense. */
3951 else if (invert_jump (bb_end_jump,
3952 (e_fall->dest
3953 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3954 ? NULL_RTX
3955 : label_for_bb (e_fall->dest)), 0))
3956 {
3957 e_fall->flags &= ~EDGE_FALLTHRU;
3958 gcc_checking_assert (could_fall_through
3959 (e_taken->src, e_taken->dest));
3960 e_taken->flags |= EDGE_FALLTHRU;
3961 update_br_prob_note (bb);
3962 if (LABEL_NUSES (ret_label) == 0
3963 && single_pred_p (e_taken->dest))
3964 delete_insn (as_a<rtx_insn *> (ret_label));
3965 continue;
3966 }
3967 }
3968 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3969 {
3970 /* If the old fallthru is still next, or if the asm goto
3971 doesn't have a fallthru (e.g. when followed by
3972 __builtin_unreachable ()), there is nothing to do. */
3973 if (! e_fall
3974 || bb->aux == e_fall->dest
3975 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3976 continue;
3977
3978 /* Otherwise we'll have to use the fallthru fixup below. */
3979 }
3980 else
3981 {
3982 /* Otherwise we have some return, switch or computed
3983 jump. In the 99% case, there should not have been a
3984 fallthru edge. */
3985 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3986 continue;
3987 }
3988 }
3989 else
3990 {
3991 /* No fallthru implies a noreturn function with EH edges, or
3992 something similarly bizarre. In any case, we don't need to
3993 do anything. */
3994 if (! e_fall)
3995 continue;
3996
3997 /* If the fallthru block is still next, nothing to do. */
3998 if (bb->aux == e_fall->dest)
3999 continue;
4000
4001 /* A fallthru to exit block. */
4002 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4003 continue;
4004 }
4005
4006 /* We got here if we need to add a new jump insn.
4007 Note force_nonfallthru can delete E_FALL and thus we have to
4008 save E_FALL->src prior to the call to force_nonfallthru. */
4009 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
4010 if (nb)
4011 {
4012 nb->aux = bb->aux;
4013 bb->aux = nb;
4014 /* Don't process this new block. */
4015 bb = nb;
4016 }
4017 }
4018
4019 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
4020
4021 /* Annoying special case - jump around dead jumptables left in the code. */
4022 FOR_EACH_BB_FN (bb, cfun)
4023 {
4024 edge e = find_fallthru_edge (bb->succs);
4025
4026 if (e && !can_fallthru (e->src, e->dest))
4027 force_nonfallthru (e);
4028 }
4029
4030 /* When not optimizing, ensure that each edge's goto_locus is represented
4031 by at least one instruction with that locus in the RTL. */
4032 if (!optimize && !DECL_IGNORED_P (current_function_decl))
4033 FOR_EACH_BB_FN (bb, cfun)
4034 {
4035 edge e;
4036 edge_iterator ei;
4037
4038 FOR_EACH_EDGE (e, ei, bb->succs)
4039 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
4040 && !(e->flags & EDGE_ABNORMAL))
4041 {
4042 edge e2;
4043 edge_iterator ei2;
4044 basic_block dest, nb;
4045 rtx_insn *end;
4046
4047 insn = BB_END (e->src);
4048 end = PREV_INSN (BB_HEAD (e->src));
4049 while (insn != end
4050 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
4051 insn = PREV_INSN (insn);
4052 if (insn != end
4053 && INSN_LOCATION (insn) == e->goto_locus)
4054 continue;
4055 if (simplejump_p (BB_END (e->src))
4056 && !INSN_HAS_LOCATION (BB_END (e->src)))
4057 {
4058 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
4059 continue;
4060 }
4061 dest = e->dest;
4062 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4063 {
4064 /* Non-fallthru edges to the exit block cannot be split. */
4065 if (!(e->flags & EDGE_FALLTHRU))
4066 continue;
4067 }
4068 else
4069 {
4070 insn = BB_HEAD (dest);
4071 end = NEXT_INSN (BB_END (dest));
4072 while (insn != end && !NONDEBUG_INSN_P (insn))
4073 insn = NEXT_INSN (insn);
4074 if (insn != end && INSN_HAS_LOCATION (insn)
4075 && INSN_LOCATION (insn) == e->goto_locus)
4076 continue;
4077 }
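	    /* Neither the source nor the destination block already carries an
	       insn with the edge's goto_locus, so split the edge and give the
	       new block a (possibly nop) insn with that location.  */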
4078 nb = split_edge (e);
4079 if (!INSN_P (BB_END (nb)))
4080 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
4081 nb);
4082 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
4083
4084 /* If there are other incoming edges to the destination block
4085 with the same goto locus, redirect them to the new block as
4086 well, this can prevent other such blocks from being created
4087 in subsequent iterations of the loop. */
4088 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
4089 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
4090 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
4091 && e->goto_locus == e2->goto_locus)
4092 redirect_edge_and_branch (e2, nb);
4093 else
4094 ei_next (&ei2);
4095 }
4096 }
4097 }
4098
4099 /* Perform sanity checks on the insn chain.
4100 1. Check that next/prev pointers are consistent in both the forward and
4101 reverse direction.
4102 2. Count insns in chain, going both directions, and check if equal.
4103 3. Check that get_last_insn () returns the actual end of chain. */
4104
4105 DEBUG_FUNCTION void
4106 verify_insn_chain (void)
4107 {
4108 rtx_insn *x, *prevx, *nextx;
4109 int insn_cnt1, insn_cnt2;
4110
4111 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4112 x != 0;
4113 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4114 gcc_assert (PREV_INSN (x) == prevx);
4115
4116 gcc_assert (prevx == get_last_insn ());
4117
4118 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4119 x != 0;
4120 nextx = x, insn_cnt2++, x = PREV_INSN (x))
4121 gcc_assert (NEXT_INSN (x) == nextx);
4122
4123 gcc_assert (insn_cnt1 == insn_cnt2);
4124 }
4125
4126 /* If we have assembler epilogues, the block falling through to exit must
4127 be the last one in the reordered chain when we reach final. Ensure
4128 that this condition is met. */
4129 static void
4130 fixup_fallthru_exit_predecessor (void)
4131 {
4132 edge e;
4133 basic_block bb = NULL;
4134
4135 /* This transformation is not valid before reload, because we might
4136 separate a call from the instruction that copies the return
4137 value. */
4138 gcc_assert (reload_completed);
4139
4140 e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4141 if (e)
4142 bb = e->src;
4143
4144 if (bb && bb->aux)
4145 {
4146 basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4147
4148 /* If the very first block is the one with the fall-through exit
4149 edge, we have to split that block. */
4150 if (c == bb)
4151 {
4152 bb = split_block_after_labels (bb)->dest;
4153 bb->aux = c->aux;
4154 c->aux = bb;
4155 BB_FOOTER (bb) = BB_FOOTER (c);
4156 BB_FOOTER (c) = NULL;
4157 }
4158
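      /* Unlink BB from its current position in the aux chain, then walk to
         the end of the chain and re-link BB as the last block.  */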
4159 while (c->aux != bb)
4160 c = (basic_block) c->aux;
4161
4162 c->aux = bb->aux;
4163 while (c->aux)
4164 c = (basic_block) c->aux;
4165
4166 c->aux = bb;
4167 bb->aux = NULL;
4168 }
4169 }
4170
4171 /* If the exit block has more than one fallthru predecessor, force there
4172 to be only one. */
4173
4174 static void
4175 force_one_exit_fallthru (void)
4176 {
4177 edge e, predecessor = NULL;
4178 bool more = false;
4179 edge_iterator ei;
4180 basic_block forwarder, bb;
4181
4182 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4183 if (e->flags & EDGE_FALLTHRU)
4184 {
4185 if (predecessor == NULL)
4186 predecessor = e;
4187 else
4188 {
4189 more = true;
4190 break;
4191 }
4192 }
4193
4194 if (!more)
4195 return;
4196
4197 /* Exit has several fallthru predecessors. Create a forwarder block for
4198 them. */
4199 forwarder = split_edge (predecessor);
4200 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4201 (e = ei_safe_edge (ei)); )
4202 {
4203 if (e->src == forwarder
4204 || !(e->flags & EDGE_FALLTHRU))
4205 ei_next (&ei);
4206 else
4207 redirect_edge_and_branch_force (e, forwarder);
4208 }
4209
4210 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4211 exit block. */
4212 FOR_EACH_BB_FN (bb, cfun)
4213 {
4214 if (bb->aux == NULL && bb != forwarder)
4215 {
4216 bb->aux = forwarder;
4217 break;
4218 }
4219 }
4220 }
4221
4222 /* Return true in case it is possible to duplicate the basic block BB. */
4223
4224 static bool
4225 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4226 {
4227 /* Do not attempt to duplicate tablejumps, as we need to unshare
4228 the dispatch table. This is difficult to do, as the instructions
4229 computing jump destination may be hoisted outside the basic block. */
4230 if (tablejump_p (BB_END (bb), NULL, NULL))
4231 return false;
4232
4233 /* Do not duplicate blocks containing insns that can't be copied. */
4234 if (targetm.cannot_copy_insn_p)
4235 {
4236 rtx_insn *insn = BB_HEAD (bb);
4237 while (1)
4238 {
4239 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4240 return false;
4241 if (insn == BB_END (bb))
4242 break;
4243 insn = NEXT_INSN (insn);
4244 }
4245 }
4246
4247 return true;
4248 }
4249
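/* Duplicate the insns from FROM to TO (inclusive) at the end of the current
   insn chain and return the first insn of the copy.  Dispatch tables, and
   barriers following them, are not copied; the caller is expected to re-link
   the copied chain into its final position.  */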
4250 rtx_insn *
4251 duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
4252 {
4253 rtx_insn *insn, *next, *copy;
4254 rtx_note *last;
4255
4256 /* Avoid updating the boundaries of the previous basic block. The
4257 note will be removed from the insn stream during fixup. */
4258 last = emit_note (NOTE_INSN_DELETED);
4259
4260 /* Create copy at the end of INSN chain. The chain will
4261 be reordered later. */
4262 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4263 {
4264 switch (GET_CODE (insn))
4265 {
4266 case DEBUG_INSN:
4267 /* Don't duplicate label debug insns. */
4268 if (DEBUG_BIND_INSN_P (insn)
4269 && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4270 break;
4271 /* FALLTHRU */
4272 case INSN:
4273 case CALL_INSN:
4274 case JUMP_INSN:
4275 copy = emit_copy_of_insn_after (insn, get_last_insn ());
4276 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4277 && ANY_RETURN_P (JUMP_LABEL (insn)))
4278 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4279 maybe_copy_prologue_epilogue_insn (insn, copy);
4280 break;
4281
4282 case JUMP_TABLE_DATA:
4283 /* Avoid copying dispatch tables. We never duplicate
4284 tablejumps, so this can only happen when the table got
4285 moved far away from the original jump.
4286 Also skip the following barrier, if any
4287 (and any debug insns in between). */
4288 for (next = NEXT_INSN (insn);
4289 next != NEXT_INSN (to);
4290 next = NEXT_INSN (next))
4291 if (!DEBUG_INSN_P (next))
4292 break;
4293 if (next != NEXT_INSN (to) && BARRIER_P (next))
4294 insn = next;
4295 break;
4296
4297 case CODE_LABEL:
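	  /* Do not copy labels; they must remain unique, so the duplicate
	     block gets a fresh label elsewhere if one is needed.  */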
4298 break;
4299
4300 case BARRIER:
4301 emit_barrier ();
4302 break;
4303
4304 case NOTE:
4305 switch (NOTE_KIND (insn))
4306 {
4307 /* In case the prologue is empty and the function contains a label
4308 in its first BB, we may want to copy the block. */
4309 case NOTE_INSN_PROLOGUE_END:
4310
4311 case NOTE_INSN_DELETED:
4312 case NOTE_INSN_DELETED_LABEL:
4313 case NOTE_INSN_DELETED_DEBUG_LABEL:
4314 /* No problem to strip these. */
4315 case NOTE_INSN_FUNCTION_BEG:
4316 /* There is always just a single entry to the function. */
4317 case NOTE_INSN_BASIC_BLOCK:
4318 /* We should only switch text sections once. */
4319 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4320 break;
4321
4322 case NOTE_INSN_EPILOGUE_BEG:
4323 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
4324 emit_note_copy (as_a <rtx_note *> (insn));
4325 break;
4326
4327 default:
4328 /* All other notes should have already been eliminated. */
4329 gcc_unreachable ();
4330 }
4331 break;
4332 default:
4333 gcc_unreachable ();
4334 }
4335 }
4336 insn = NEXT_INSN (last);
4337 delete_insn (last);
4338 return insn;
4339 }
4340
4341 /* Create a duplicate of the basic block BB. */
4342
4343 static basic_block
4344 cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *)
4345 {
4346 rtx_insn *insn;
4347 basic_block new_bb;
4348
4349 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
4350 new_bb = create_basic_block (insn,
4351 insn ? get_last_insn () : NULL,
4352 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4353
4354 BB_COPY_PARTITION (new_bb, bb);
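  /* Also duplicate the header and footer insn chains, which live outside
     the main insn stream while in cfglayout mode.  */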
4355 if (BB_HEADER (bb))
4356 {
4357 insn = BB_HEADER (bb);
4358 while (NEXT_INSN (insn))
4359 insn = NEXT_INSN (insn);
4360 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
4361 if (insn)
4362 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4363 }
4364
4365 if (BB_FOOTER (bb))
4366 {
4367 insn = BB_FOOTER (bb);
4368 while (NEXT_INSN (insn))
4369 insn = NEXT_INSN (insn);
4370 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
4371 if (insn)
4372 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4373 }
4374
4375 return new_bb;
4376 }
4377
4378
4379 /* Main entry point to this module - initialize the data structures for
4380 CFG layout changes.
4381
4382 FLAGS is a set of additional flags to pass to cleanup_cfg (). */
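/* A sketch of typical use, mirroring the into_cfglayout/outof_cfglayout
   passes above:

     cfg_layout_initialize (0);
     ... transform the CFG using the cfg_layout_* hooks ...
     cfg_layout_finalize ();
*/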
4383
4384 void
4385 cfg_layout_initialize (int flags)
4386 {
4387 rtx_insn_list *x;
4388 basic_block bb;
4389
4390 /* Once bb partitioning is complete, cfg layout mode should not be
4391 re-entered. Entering cfg layout mode may require fixups; as an
4392 example, edge forwarding performed while optimizing the cfg layout
4393 might require moving a block from the hot to the cold section, which
4394 would create an illegal partitioning unless some manual fixup was
4395 performed. */
4396 gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
4397
4398 initialize_original_copy_tables ();
4399
4400 cfg_layout_rtl_register_cfg_hooks ();
4401
4402 record_effective_endpoints ();
4403
4404 /* Make sure that the targets of non local gotos are marked. */
4405 for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4406 {
4407 bb = BLOCK_FOR_INSN (x->insn ());
4408 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4409 }
4410
4411 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4412 }
4413
4414 /* Splits superblocks. */
4415 void
4416 break_superblocks (void)
4417 {
4418 bool need = false;
4419 basic_block bb;
4420
4421 auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
4422 bitmap_clear (superblocks);
4423
4424 FOR_EACH_BB_FN (bb, cfun)
4425 if (bb->flags & BB_SUPERBLOCK)
4426 {
4427 bb->flags &= ~BB_SUPERBLOCK;
4428 bitmap_set_bit (superblocks, bb->index);
4429 need = true;
4430 }
4431
4432 if (need)
4433 {
4434 rebuild_jump_labels (get_insns ());
4435 find_many_sub_basic_blocks (superblocks);
4436 }
4437 }
4438
4439 /* Finalize the changes: reorder insn list according to the sequence specified
4440 by aux pointers, enter compensation code, rebuild scope forest. */
4441
4442 void
4443 cfg_layout_finalize (void)
4444 {
4445 free_dominance_info (CDI_DOMINATORS);
4446 force_one_exit_fallthru ();
4447 rtl_register_cfg_hooks ();
4448 if (reload_completed && !targetm.have_epilogue ())
4449 fixup_fallthru_exit_predecessor ();
4450 fixup_reorder_chain ();
4451
4452 rebuild_jump_labels (get_insns ());
4453 delete_dead_jumptables ();
4454
4455 if (flag_checking)
4456 verify_insn_chain ();
4457 checking_verify_flow_info ();
4458 }
4459
4460
4461 /* Same as split_block but update cfg_layout structures. */
4462
4463 static basic_block
4464 cfg_layout_split_block (basic_block bb, void *insnp)
4465 {
4466 rtx insn = (rtx) insnp;
4467 basic_block new_bb = rtl_split_block (bb, insn);
4468
4469 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4470 BB_FOOTER (bb) = NULL;
4471
4472 return new_bb;
4473 }
4474
4475 /* Redirect edge E to DEST. */
4476 static edge
4477 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4478 {
4479 basic_block src = e->src;
4480 edge ret;
4481
4482 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4483 return NULL;
4484
4485 if (e->dest == dest)
4486 return e;
4487
4488 if (e->flags & EDGE_CROSSING
4489 && BB_PARTITION (e->src) == BB_PARTITION (dest)
4490 && simplejump_p (BB_END (src)))
4491 {
4492 if (dump_file)
4493 fprintf (dump_file,
4494 "Removing crossing jump while redirecting edge from %i to %i\n",
4495 e->src->index, dest->index);
4496 delete_insn (BB_END (src));
4497 remove_barriers_from_footer (src);
4498 e->flags |= EDGE_FALLTHRU;
4499 }
4500
4501 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4502 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4503 {
4504 df_set_bb_dirty (src);
4505 return ret;
4506 }
4507
4508 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4509 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4510 {
4511 if (dump_file)
4512 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4513 e->src->index, dest->index);
4514
4515 df_set_bb_dirty (e->src);
4516 redirect_edge_succ (e, dest);
4517 return e;
4518 }
4519
4520 /* redirect_edge_and_branch may decide to turn the branch into a fallthru
4521 edge when the basic blocks appear to be in sequence. Avoid this
4522 transformation. */
4523
4524 if (e->flags & EDGE_FALLTHRU)
4525 {
4526 /* Redirect any branch edges unified with the fallthru one. */
4527 if (JUMP_P (BB_END (src))
4528 && label_is_jump_target_p (BB_HEAD (e->dest),
4529 BB_END (src)))
4530 {
4531 edge redirected;
4532
4533 if (dump_file)
4534 fprintf (dump_file, "Fallthru edge unified with branch "
4535 "%i->%i redirected to %i\n",
4536 e->src->index, e->dest->index, dest->index);
4537 e->flags &= ~EDGE_FALLTHRU;
4538 redirected = redirect_branch_edge (e, dest);
4539 gcc_assert (redirected);
4540 redirected->flags |= EDGE_FALLTHRU;
4541 df_set_bb_dirty (redirected->src);
4542 return redirected;
4543 }
4544 /* If we are redirecting the fallthru edge to the destination of the
4545 branch edge of a conditional jump, the jump becomes redundant; remove it. */
4546 if (EDGE_COUNT (src->succs) == 2)
4547 {
4548 /* Find the edge that is different from E. */
4549 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4550
4551 if (s->dest == dest
4552 && any_condjump_p (BB_END (src))
4553 && onlyjump_p (BB_END (src)))
4554 delete_insn (BB_END (src));
4555 }
4556 if (dump_file)
4557 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4558 e->src->index, e->dest->index, dest->index);
4559 ret = redirect_edge_succ_nodup (e, dest);
4560 }
4561 else
4562 ret = redirect_branch_edge (e, dest);
4563
4564 if (!ret)
4565 return NULL;
4566
4567 fixup_partition_crossing (ret);
4568 /* We don't want simplejumps in the insn stream during cfglayout. */
4569 gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
4570
4571 df_set_bb_dirty (src);
4572 return ret;
4573 }
4574
4575 /* Simple wrapper, as we can always redirect fallthru edges. */
4576 static basic_block
4577 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4578 {
4579 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4580
4581 gcc_assert (redirected);
4582 return NULL;
4583 }
4584
4585 /* Same as delete_basic_block but update cfg_layout structures. */
4586
4587 static void
4588 cfg_layout_delete_block (basic_block bb)
4589 {
4590 rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
4591 rtx_insn **to;
4592
4593 if (BB_HEADER (bb))
4594 {
4595 next = BB_HEAD (bb);
4596 if (prev)
4597 SET_NEXT_INSN (prev) = BB_HEADER (bb);
4598 else
4599 set_first_insn (BB_HEADER (bb));
4600 SET_PREV_INSN (BB_HEADER (bb)) = prev;
4601 insn = BB_HEADER (bb);
4602 while (NEXT_INSN (insn))
4603 insn = NEXT_INSN (insn);
4604 SET_NEXT_INSN (insn) = next;
4605 SET_PREV_INSN (next) = insn;
4606 }
4607 next = NEXT_INSN (BB_END (bb));
4608 if (BB_FOOTER (bb))
4609 {
4610 insn = BB_FOOTER (bb);
4611 while (insn)
4612 {
4613 if (BARRIER_P (insn))
4614 {
4615 if (PREV_INSN (insn))
4616 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4617 else
4618 BB_FOOTER (bb) = NEXT_INSN (insn);
4619 if (NEXT_INSN (insn))
4620 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4621 }
4622 if (LABEL_P (insn))
4623 break;
4624 insn = NEXT_INSN (insn);
4625 }
4626 if (BB_FOOTER (bb))
4627 {
4628 insn = BB_END (bb);
4629 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4630 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4631 while (NEXT_INSN (insn))
4632 insn = NEXT_INSN (insn);
4633 SET_NEXT_INSN (insn) = next;
4634 if (next)
4635 SET_PREV_INSN (next) = insn;
4636 else
4637 set_last_insn (insn);
4638 }
4639 }
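  /* Any insns that remain between the deleted block's neighbors after
     rtl_delete_block runs are re-attached either to the next block's header
     or to the function footer.  */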
4640 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4641 to = &BB_HEADER (bb->next_bb);
4642 else
4643 to = &cfg_layout_function_footer;
4644
4645 rtl_delete_block (bb);
4646
4647 if (prev)
4648 prev = NEXT_INSN (prev);
4649 else
4650 prev = get_insns ();
4651 if (next)
4652 next = PREV_INSN (next);
4653 else
4654 next = get_last_insn ();
4655
4656 if (next && NEXT_INSN (next) != prev)
4657 {
4658 remaints = unlink_insn_chain (prev, next);
4659 insn = remaints;
4660 while (NEXT_INSN (insn))
4661 insn = NEXT_INSN (insn);
4662 SET_NEXT_INSN (insn) = *to;
4663 if (*to)
4664 SET_PREV_INSN (*to) = insn;
4665 *to = remaints;
4666 }
4667 }
4668
4669 /* Return true when blocks A and B can be safely merged. */
4670
4671 static bool
4672 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4673 {
4674 /* If we are partitioning hot/cold basic blocks, we don't want to
4675 mess up unconditional or indirect jumps that cross between hot
4676 and cold sections.
4677
4678 Basic block partitioning may result in some jumps that appear to
4679 be optimizable (or blocks that appear to be mergeable), but which really
4680 must be left untouched (they are required to make it safely across
4681 partition boundaries). See the comments at the top of
4682 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4683
4684 if (BB_PARTITION (a) != BB_PARTITION (b))
4685 return false;
4686
4687 /* Protect the loop latches. */
4688 if (current_loops && b->loop_father->latch == b)
4689 return false;
4690
4691 /* If we would end up moving B's instructions, make sure it doesn't fall
4692 through into the exit block, since we cannot recover from a fallthrough
4693 edge into the exit block occurring in the middle of a function. */
4694 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4695 {
4696 edge e = find_fallthru_edge (b->succs);
4697 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4698 return false;
4699 }
4700
4701 /* There must be exactly one edge in between the blocks. */
4702 return (single_succ_p (a)
4703 && single_succ (a) == b
4704 && single_pred_p (b) == 1
4705 && a != b
4706 /* Must be simple edge. */
4707 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4708 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4709 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4710 /* If the jump insn has side effects, we can't kill the edge.
4711 When not optimizing, try_redirect_by_replacing_jump will
4712 not allow us to redirect an edge by replacing a table jump. */
4713 && (!JUMP_P (BB_END (a))
4714 || ((!optimize || reload_completed)
4715 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4716 }
4717
4718 /* Merge block A and B. The blocks must be mergeable. */
4719
4720 static void
4721 cfg_layout_merge_blocks (basic_block a, basic_block b)
4722 {
4723 /* If B is a forwarder block whose outgoing edge has no location, we'll
4724 propagate the locus of the edge between A and B onto it. */
4725 const bool forward_edge_locus
4726 = (b->flags & BB_FORWARDER_BLOCK) != 0
4727 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
4728 rtx_insn *insn;
4729
4730 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4731
4732 if (dump_file)
4733 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4734 a->index);
4735
4736 /* If there was a CODE_LABEL beginning B, delete it. */
4737 if (LABEL_P (BB_HEAD (b)))
4738 {
4739 delete_insn (BB_HEAD (b));
4740 }
4741
4742 /* We should have a fallthru edge in A, or we can do a dummy redirection to
4743 get it cleaned up. */
4744 if (JUMP_P (BB_END (a)))
4745 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4746 gcc_assert (!JUMP_P (BB_END (a)));
4747
4748 /* If not optimizing, preserve the locus of the single edge between
4749 blocks A and B if necessary by emitting a nop. */
4750 if (!optimize
4751 && !forward_edge_locus
4752 && !DECL_IGNORED_P (current_function_decl))
4753 emit_nop_for_unique_locus_between (a, b);
4754
4755 /* Move things from b->footer after a->footer. */
4756 if (BB_FOOTER (b))
4757 {
4758 if (!BB_FOOTER (a))
4759 BB_FOOTER (a) = BB_FOOTER (b);
4760 else
4761 {
4762 rtx_insn *last = BB_FOOTER (a);
4763
4764 while (NEXT_INSN (last))
4765 last = NEXT_INSN (last);
4766 SET_NEXT_INSN (last) = BB_FOOTER (b);
4767 SET_PREV_INSN (BB_FOOTER (b)) = last;
4768 }
4769 BB_FOOTER (b) = NULL;
4770 }
4771
4772 /* Move things from b->header before a->footer.
4773 Note that this may include dead tablejump data, but we don't clean
4774 those up until we go out of cfglayout mode. */
4775 if (BB_HEADER (b))
4776 {
4777 if (! BB_FOOTER (a))
4778 BB_FOOTER (a) = BB_HEADER (b);
4779 else
4780 {
4781 rtx_insn *last = BB_HEADER (b);
4782
4783 while (NEXT_INSN (last))
4784 last = NEXT_INSN (last);
4785 SET_NEXT_INSN (last) = BB_FOOTER (a);
4786 SET_PREV_INSN (BB_FOOTER (a)) = last;
4787 BB_FOOTER (a) = BB_HEADER (b);
4788 }
4789 BB_HEADER (b) = NULL;
4790 }
4791
4792 /* If the basic blocks are not adjacent, move B's insns after A's. */
4793 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4794 {
4795 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4796
4797 emit_insn_after_noloc (insn, BB_END (a), a);
4798 }
4799 /* Otherwise just re-associate the instructions. */
4800 else
4801 {
4802 insn = BB_HEAD (b);
4803 BB_END (a) = BB_END (b);
4804 }
4805
4806 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4807 We need to call it explicitly. */
4808 update_bb_for_insn_chain (insn, BB_END (b), a);
4809
4810 /* Skip possible DELETED_LABEL insn. */
4811 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4812 insn = NEXT_INSN (insn);
4813 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4814 BB_HEAD (b) = BB_END (b) = NULL;
4815 delete_insn (insn);
4816
4817 df_bb_delete (b->index);
4818
4819 if (forward_edge_locus)
4820 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4821
4822 if (dump_file)
4823 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4824 }
4825
4826 /* Split edge E. */
4827
4828 static basic_block
4829 cfg_layout_split_edge (edge e)
4830 {
4831 basic_block new_bb =
4832 create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4833 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4834 NULL_RTX, e->src);
4835
4836 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4837 BB_COPY_PARTITION (new_bb, e->src);
4838 else
4839 BB_COPY_PARTITION (new_bb, e->dest);
4840 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4841 redirect_edge_and_branch_force (e, new_bb);
4842
4843 return new_bb;
4844 }
4845
4846 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4847
4848 static void
4849 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4850 {
4851 }
4852
4853 /* Return true if BB contains only labels or non-executable
4854 instructions. */
4855
4856 static bool
4857 rtl_block_empty_p (basic_block bb)
4858 {
4859 rtx_insn *insn;
4860
4861 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4862 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4863 return true;
4864
4865 FOR_BB_INSNS (bb, insn)
4866 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4867 return false;
4868
4869 return true;
4870 }
4871
4872 /* Split a basic block if it ends with a conditional branch and if
4873 the other part of the block is not empty. */
4874
4875 static basic_block
4876 rtl_split_block_before_cond_jump (basic_block bb)
4877 {
4878 rtx_insn *insn;
4879 rtx_insn *split_point = NULL;
4880 rtx_insn *last = NULL;
4881 bool found_code = false;
4882
4883 FOR_BB_INSNS (bb, insn)
4884 {
4885 if (any_condjump_p (insn))
4886 split_point = last;
4887 else if (NONDEBUG_INSN_P (insn))
4888 found_code = true;
4889 last = insn;
4890 }
4891
4892 /* Only split if we found both a conditional jump and some other real code. */
4893 if (found_code && split_point)
4894 return split_block (bb, split_point)->dest;
4895 else
4896 return NULL;
4897 }
4898
4899 /* Return true if BB ends with a call, possibly followed by some
4900 instructions that must stay with the call, false otherwise. */
4901
4902 static bool
4903 rtl_block_ends_with_call_p (basic_block bb)
4904 {
4905 rtx_insn *insn = BB_END (bb);
4906
4907 while (!CALL_P (insn)
4908 && insn != BB_HEAD (bb)
4909 && (keep_with_call_p (insn)
4910 || NOTE_P (insn)
4911 || DEBUG_INSN_P (insn)))
4912 insn = PREV_INSN (insn);
4913 return (CALL_P (insn));
4914 }
4915
4916 /* Return true if BB ends with a conditional branch, false otherwise. */
4917
4918 static bool
4919 rtl_block_ends_with_condjump_p (const_basic_block bb)
4920 {
4921 return any_condjump_p (BB_END (bb));
4922 }
4923
4924 /* Return true if we need to add fake edge to exit.
4925 Helper function for rtl_flow_call_edges_add. */
4926
4927 static bool
4928 need_fake_edge_p (const rtx_insn *insn)
4929 {
4930 if (!INSN_P (insn))
4931 return false;
4932
4933 if ((CALL_P (insn)
4934 && !SIBLING_CALL_P (insn)
4935 && !find_reg_note (insn, REG_NORETURN, NULL)
4936 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4937 return true;
4938
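  /* Volatile asm statements (standalone, inside a PARALLEL, or old-style
     ASM_INPUTs) are treated like non-const calls and also get a fake edge.  */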
4939 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4940 && MEM_VOLATILE_P (PATTERN (insn)))
4941 || (GET_CODE (PATTERN (insn)) == PARALLEL
4942 && asm_noperands (insn) != -1
4943 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4944 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4945 }
4946
4947 /* Add fake edges to the function exit for any non constant and non noreturn
4948 calls, volatile inline assembly in the bitmap of blocks specified by
4949 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
4950 that were split.
4951
4952 The goal is to expose cases in which entering a basic block does not imply
4953 that all subsequent instructions must be executed. */
4954
4955 static int
4956 rtl_flow_call_edges_add (sbitmap blocks)
4957 {
4958 int i;
4959 int blocks_split = 0;
4960 int last_bb = last_basic_block_for_fn (cfun);
4961 bool check_last_block = false;
4962
4963 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
4964 return 0;
4965
4966 if (! blocks)
4967 check_last_block = true;
4968 else
4969 check_last_block = bitmap_bit_p (blocks,
4970 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
4971
4972 /* In the last basic block, before epilogue generation, there will be
4973 a fallthru edge to EXIT. Special care is required if the last insn
4974 of the last basic block is a call because make_edge folds duplicate
4975 edges, which would result in the fallthru edge also being marked
4976 fake, which would result in the fallthru edge being removed by
4977 remove_fake_edges, which would result in an invalid CFG.
4978
4979 Moreover, we can't elide the outgoing fake edge, since the block
4980 profiler needs to take this into account in order to solve the minimal
4981 spanning tree in the case that the call doesn't return.
4982
4983 Handle this by adding a dummy instruction in a new last basic block. */
4984 if (check_last_block)
4985 {
4986 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
4987 rtx_insn *insn = BB_END (bb);
4988
4989 /* Back up past insns that must be kept in the same block as a call. */
4990 while (insn != BB_HEAD (bb)
4991 && keep_with_call_p (insn))
4992 insn = PREV_INSN (insn);
4993
4994 if (need_fake_edge_p (insn))
4995 {
4996 edge e;
4997
4998 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
4999 if (e)
5000 {
5001 insert_insn_on_edge (gen_use (const0_rtx), e);
5002 commit_edge_insertions ();
5003 }
5004 }
5005 }
5006
5007 /* Now add fake edges to the function exit for any non constant
5008 calls since there is no way that we can determine if they will
5009 return or not... */
5010
5011 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
5012 {
5013 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5014 rtx_insn *insn;
5015 rtx_insn *prev_insn;
5016
5017 if (!bb)
5018 continue;
5019
5020 if (blocks && !bitmap_bit_p (blocks, i))
5021 continue;
5022
5023 for (insn = BB_END (bb); ; insn = prev_insn)
5024 {
5025 prev_insn = PREV_INSN (insn);
5026 if (need_fake_edge_p (insn))
5027 {
5028 edge e;
5029 rtx_insn *split_at_insn = insn;
5030
5031 /* Don't split the block between a call and an insn that should
5032 remain in the same block as the call. */
5033 if (CALL_P (insn))
5034 while (split_at_insn != BB_END (bb)
5035 && keep_with_call_p (NEXT_INSN (split_at_insn)))
5036 split_at_insn = NEXT_INSN (split_at_insn);
5037
5038 /* The handling above of the final block before the epilogue
5039 should be enough to verify that there is no edge to the exit
5040 block in CFG already. Calling make_edge in such case would
5041 cause us to mark that edge as fake and remove it later. */
5042
5043 if (flag_checking && split_at_insn == BB_END (bb))
5044 {
5045 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
5046 gcc_assert (e == NULL);
5047 }
5048
5049 /* Note that the following may create a new basic block
5050 and renumber the existing basic blocks. */
5051 if (split_at_insn != BB_END (bb))
5052 {
5053 e = split_block (bb, split_at_insn);
5054 if (e)
5055 blocks_split++;
5056 }
5057
5058 edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
5059 ne->probability = profile_probability::guessed_never ();
5060 }
5061
5062 if (insn == BB_HEAD (bb))
5063 break;
5064 }
5065 }
5066
5067 if (blocks_split)
5068 verify_flow_info ();
5069
5070 return blocks_split;
5071 }
5072
5073 /* Add COMP_RTX as a condition at the end of COND_BB. FIRST_HEAD is
5074 the conditional branch target, SECOND_HEAD should be the fall-thru;
5075 there is no need to handle the fall-thru here because the loop
5076 versioning code handles it. SECOND_HEAD is needed because the
5077 corresponding tree-level hook takes it, and this hook must have the same type. */
5078 static void
5079 rtl_lv_add_condition_to_bb (basic_block first_head,
5080 basic_block second_head ATTRIBUTE_UNUSED,
5081 basic_block cond_bb, void *comp_rtx)
5082 {
5083 rtx_code_label *label;
5084 rtx_insn *seq, *jump;
5085 rtx op0 = XEXP ((rtx)comp_rtx, 0);
5086 rtx op1 = XEXP ((rtx)comp_rtx, 1);
5087 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
5088 machine_mode mode;
5089
5090
5091 label = block_label (first_head);
5092 mode = GET_MODE (op0);
5093 if (mode == VOIDmode)
5094 mode = GET_MODE (op1);
5095
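  /* Build the compare-and-jump as a detached sequence, then splice it in
     at the end of COND_BB below.  */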
5096 start_sequence ();
5097 op0 = force_operand (op0, NULL_RTX);
5098 op1 = force_operand (op1, NULL_RTX);
5099 do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
5100 profile_probability::uninitialized ());
5101 jump = get_last_insn ();
5102 JUMP_LABEL (jump) = label;
5103 LABEL_NUSES (label)++;
5104 seq = get_insns ();
5105 end_sequence ();
5106
5107 /* Add the new cond, in the new head. */
5108 emit_insn_after (seq, BB_END (cond_bb));
5109 }
5110
5111
5112 /* Given a block B with a conditional branch at its end, store the
5113 branch edge in *BRANCH_EDGE and the fall-thru edge in
5114 *FALLTHRU_EDGE. */
5115 static void
5116 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
5117 edge *fallthru_edge)
5118 {
5119 edge e = EDGE_SUCC (b, 0);
5120
5121 if (e->flags & EDGE_FALLTHRU)
5122 {
5123 *fallthru_edge = e;
5124 *branch_edge = EDGE_SUCC (b, 1);
5125 }
5126 else
5127 {
5128 *branch_edge = e;
5129 *fallthru_edge = EDGE_SUCC (b, 1);
5130 }
5131 }
5132
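/* Allocate and zero the RTL-specific (rtl_bb_info) data for basic block BB,
   which must not have any yet.  */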
5133 void
5134 init_rtl_bb_info (basic_block bb)
5135 {
5136 gcc_assert (!bb->il.x.rtl);
5137 bb->il.x.head_ = NULL;
5138 bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5139 }
5140
5141 /* Returns true if it is possible to remove edge E by redirecting
5142 it to the destination of the other edge from E->src. */
5143
5144 static bool
5145 rtl_can_remove_branch_p (const_edge e)
5146 {
5147 const_basic_block src = e->src;
5148 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
5149 const rtx_insn *insn = BB_END (src);
5150 rtx set;
5151
5152 /* The conditions are taken from try_redirect_by_replacing_jump. */
5153 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
5154 return false;
5155
5156 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5157 return false;
5158
5159 if (BB_PARTITION (src) != BB_PARTITION (target))
5160 return false;
5161
5162 if (!onlyjump_p (insn)
5163 || tablejump_p (insn, NULL, NULL))
5164 return false;
5165
5166 set = single_set (insn);
5167 if (!set || side_effects_p (set))
5168 return false;
5169
5170 return true;
5171 }
5172
5173 static basic_block
5174 rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
5175 {
5176 bb = cfg_layout_duplicate_bb (bb, id);
5177 bb->aux = NULL;
5178 return bb;
5179 }
5180
5181 /* Do book-keeping of basic block BB for the profile consistency checker.
5182 Store the counts in RECORD. */
5183 static void
5184 rtl_account_profile_record (basic_block bb, struct profile_record *record)
5185 {
5186 rtx_insn *insn;
5187 FOR_BB_INSNS (bb, insn)
5188 if (INSN_P (insn))
5189 {
5190 record->size += insn_cost (insn, false);
5191 if (bb->count.initialized_p ())
5192 record->time
5193 += insn_cost (insn, true) * bb->count.to_gcov_type ();
5194 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
5195 record->time
5196 += insn_cost (insn, true) * bb->count.to_frequency (cfun);
5197 }
5198 }
5199
5200 /* Implementation of CFG manipulation for linearized RTL. */
5201 struct cfg_hooks rtl_cfg_hooks = {
5202 "rtl",
5203 rtl_verify_flow_info,
5204 rtl_dump_bb,
5205 rtl_dump_bb_for_graph,
5206 rtl_create_basic_block,
5207 rtl_redirect_edge_and_branch,
5208 rtl_redirect_edge_and_branch_force,
5209 rtl_can_remove_branch_p,
5210 rtl_delete_block,
5211 rtl_split_block,
5212 rtl_move_block_after,
5213 rtl_can_merge_blocks, /* can_merge_blocks_p */
5214 rtl_merge_blocks,
5215 rtl_predict_edge,
5216 rtl_predicted_by_p,
5217 cfg_layout_can_duplicate_bb_p,
5218 rtl_duplicate_bb,
5219 rtl_split_edge,
5220 rtl_make_forwarder_block,
5221 rtl_tidy_fallthru_edge,
5222 rtl_force_nonfallthru,
5223 rtl_block_ends_with_call_p,
5224 rtl_block_ends_with_condjump_p,
5225 rtl_flow_call_edges_add,
5226 NULL, /* execute_on_growing_pred */
5227 NULL, /* execute_on_shrinking_pred */
5228 NULL, /* duplicate loop for trees */
5229 NULL, /* lv_add_condition_to_bb */
5230 NULL, /* lv_adjust_loop_header_phi*/
5231 NULL, /* extract_cond_bb_edges */
5232 NULL, /* flush_pending_stmts */
5233 rtl_block_empty_p, /* block_empty_p */
5234 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5235 rtl_account_profile_record,
5236 };
5237
5238 /* Implementation of CFG manipulation for cfglayout RTL, where basic
5239 blocks connected via fallthru edges do not have to be adjacent.
5240 This representation will hopefully become the default one in a future
5241 version of the compiler. */
5242
5243 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
5244 "cfglayout mode",
5245 rtl_verify_flow_info_1,
5246 rtl_dump_bb,
5247 rtl_dump_bb_for_graph,
5248 cfg_layout_create_basic_block,
5249 cfg_layout_redirect_edge_and_branch,
5250 cfg_layout_redirect_edge_and_branch_force,
5251 rtl_can_remove_branch_p,
5252 cfg_layout_delete_block,
5253 cfg_layout_split_block,
5254 rtl_move_block_after,
5255 cfg_layout_can_merge_blocks_p,
5256 cfg_layout_merge_blocks,
5257 rtl_predict_edge,
5258 rtl_predicted_by_p,
5259 cfg_layout_can_duplicate_bb_p,
5260 cfg_layout_duplicate_bb,
5261 cfg_layout_split_edge,
5262 rtl_make_forwarder_block,
5263 NULL, /* tidy_fallthru_edge */
5264 rtl_force_nonfallthru,
5265 rtl_block_ends_with_call_p,
5266 rtl_block_ends_with_condjump_p,
5267 rtl_flow_call_edges_add,
5268 NULL, /* execute_on_growing_pred */
5269 NULL, /* execute_on_shrinking_pred */
5270 duplicate_loop_to_header_edge, /* duplicate loop for trees */
5271 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5272 NULL, /* lv_adjust_loop_header_phi*/
5273 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
5274 NULL, /* flush_pending_stmts */
5275 rtl_block_empty_p, /* block_empty_p */
5276 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5277 rtl_account_profile_record,
5278 };
5279
5280 #include "gt-cfgrtl.h"
5281
5282 #if __GNUC__ >= 10
5283 # pragma GCC diagnostic pop
5284 #endif
5285