/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
         delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
         insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
         purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
         fixup_abnormal_edges

   Functions not meant for generic use:
     - Infrastructure to determine quickly basic block for insn
         compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of insn chain
         block_label, tidy_fallthru_edge, force_nonfallthru  */
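
/* A minimal usage sketch of the edge-insertion API listed above, for
   orientation only (E stands for some suitable non-abnormal edge; this
   is not part of the file's interface):

       start_sequence ();
       ... emit the insns that should run on edge E ...
       rtx_insn *seq = get_insns ();
       end_sequence ();
       insert_insn_on_edge (seq, e);
       commit_edge_insertions ();
*/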

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "bb-reorder.h"
#include "rtl-error.h"
#include "insn-attr.h"
#include "dojump.h"
#include "expr.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "print-rtl.h"

/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx_insn *cfg_layout_function_footer;
static GTY(()) rtx_insn *cfg_layout_function_header;

static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static int can_delete_note_p (const rtx_note *);
static int can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const rtx_note *note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
          /* User declared labels must be preserved.  */
          && LABEL_NAME (label) == 0
          && !vec_safe_contains<rtx_insn *> (forced_labels,
                                             const_cast<rtx_code_label *> (label)));
}

/* Delete INSN by patching it out.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be referenced via variables, the constant pool etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
        {
          const char *name = LABEL_NAME (insn);
          basic_block bb = BLOCK_FOR_INSN (insn);
          rtx_insn *bb_note = NEXT_INSN (insn);

          really_delete = false;
          PUT_CODE (insn, NOTE);
          NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
          NOTE_DELETED_LABEL_NAME (insn) = name;

          /* If the note following the label starts a basic block, and the
             label is a member of the same basic block, interchange the two.  */
          if (bb_note != NULL_RTX
              && NOTE_INSN_BASIC_BLOCK_P (bb_note)
              && bb != NULL
              && bb == BLOCK_FOR_INSN (bb_note))
            {
              reorder_insns_nobb (insn, insn, bb_note);
              BB_HEAD (bb) = bb_note;
              if (BB_END (bb) == bb_note)
                BB_END (bb) = insn;
            }
        }

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
        df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
          && LABEL_P (JUMP_LABEL (insn)))
        LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
              = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
             && LABEL_P (XEXP (note, 0)))
        {
          LABEL_NUSES (XEXP (note, 0))--;
          remove_note (insn, note);
        }
    }

  /* Likewise if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
         && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
        {
          rtx label = XEXP (RTVEC_ELT (vec, i), 0);

          /* When deleting code in bulk (e.g. removing many unreachable
             blocks) we can delete a label that's a target of the vector
             before deleting the vector itself.  */
          if (!NOTE_P (label))
            LABEL_NUSES (label)--;
        }
    }
}

/* Like delete_insn but also purge dead edges from BB.
   Return true if any edges are eliminated.  */

bool
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    purge = true;
  delete_insn (insn);
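  /* BLOCK_FOR_INSN is still usable here: delete_insn patches the insn out
     of the chain but, as the use below relies on, leaves the insn's bb
     field in place.  */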
  if (purge)
    return purge_dead_edges (BLOCK_FOR_INSN (insn));
  return false;
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, set the bb field to NULL
   for any insns that cannot be removed.  */

void
delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
{
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
  rtx_insn *current = finish;
  while (1)
    {
      rtx_insn *prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
        ;
      else
        delete_insn (current);

      if (clear_bb && !current->deleted ())
        set_block_for_insn (current, NULL);

      if (current == start)
        break;
      current = prev;
    }
}

/* Create a new basic block consisting of the instructions between HEAD and
   END inclusive.  This function is designed to allow fast BB construction:
   it reuses the note and basic block struct in BB_NOTE, if any, and does
   not grow the BASIC_BLOCK chain, so it should be used directly only by
   CFG construction code.  END can be NULL to create a new empty basic
   block before HEAD.  Both END and HEAD can be NULL to create a basic
   block at the end of the INSN chain.  AFTER is the basic block that the
   new block should be put after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
                              basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
        after = head;
      else
        {
          after = PREV_INSN (head);
          head = bb_note;
        }

      if (after != bb_note && NEXT_INSN (after) != bb_note)
        reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
        head = end = bb_note
          = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
        {
          bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
          if (head == end)
            end = bb_note;
        }
      else
        {
          bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
          head = bb_note;
          if (!end)
            end = head;
        }

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

/* Create a new basic block consisting of the instructions between HEAD and
   END and place it into the BB chain after block AFTER.  END can be NULL
   to create a new empty basic block before HEAD.  Both END and HEAD can
   be NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    {
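      /* Grow by roughly 25% over the current size to amortize the cost
         of reallocation.  */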
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}
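
/* cfglayout-mode hook for creating a basic block; currently this simply
   wraps rtl_create_basic_block.  */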
static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);

  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
        {
          BLOCK_FOR_INSN (insn) = bb;
          if (insn == end)
            break;
        }
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}

/* Return the insn after which to emit code when we want to insert code at
   the entry of the function.  */
rtx_insn *
entry_of_function (void)
{
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
          BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}

/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
          || (JUMP_P (insn) && simplejump_p (insn))
          || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
        return false;
    }

  return true;
}

/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
        && e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}

/* Return nonzero if we could reach target from src by falling through,
   if the target was made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
        && e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}

/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Creates a new basic block just after basic block BB by splitting
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
        {
          rtx_insn *next = insn;

          insn = PREV_INSN (insn);

          /* If the block contains only debug insns, insn would have
             been NULL in a non-debug compilation, and then we'd end
             up emitting a DELETED note.  For -fcompare-debug
             stability, emit the note too.  */
          if (insn != BB_END (bb)
              && DEBUG_INSN_P (next)
              && DEBUG_INSN_P (BB_END (bb)))
            {
              while (next != BB_END (bb) && DEBUG_INSN_P (next))
                next = NEXT_INSN (next);

              if (next == BB_END (bb))
                emit_note_after (NOTE_INSN_DELETED, next);
            }
        }
      else
        insn = get_last_insn ();
    }

  /* We probably should check the type of the insn so that we do not create
     an inconsistent cfg.  It is checked in verify_flow_info anyway, so do
     not bother here.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}

/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
        insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
          && INSN_LOCATION (insn) == goto_locus)
        return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
             a->index);
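  /* Skip over any trailing debug insns of B; the range [b_debug_start,
     b_debug_end] is reattached to A separately below.  */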
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
         in particular at the end of a function.  */
      if (b_head == b_end)
        b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
        b_empty = 1;
      if (! del_last)
        del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
        if (!NOTE_P (prev)
            || NOTE_INSN_BASIC_BLOCK_P (prev)
            || prev == BB_HEAD (a))
          break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (HAVE_cc0 && only_sets_cc0_p (prev))
        {
          rtx_insn *tmp = prev;

          prev = prev_nonnote_insn (prev);
          if (!prev)
            prev = BB_HEAD (a);
          del_first = tmp;
        }

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
         and the debug insns that make up B after the debug insns,
         bringing the debug insns into A while keeping the notes after
         the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
        reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
                            b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}

/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
          && single_succ (a) == b
          && single_pred_p (b)
          && a != b
          /* Must be simple edge.  */
          && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
          && a->next_bb == b
          && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
          && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
          /* If the jump insn has side effects,
             we can't kill the edge.  */
          && (!JUMP_P (BB_END (a))
              || (reload_completed
                  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Return the label in the head of basic block BLOCK.  Create one if it
   doesn't exist.  */

rtx_code_label *
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  if (!LABEL_P (BB_HEAD (block)))
    BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));

  return as_a <rtx_code_label *> (BB_HEAD (block));
}

/* Remove all barriers from BB_FOOTER of a BB.  */

static void
remove_barriers_from_footer (basic_block bb)
{
  rtx_insn *insn = BB_FOOTER (bb);

  /* Remove barriers but keep jumptables.  */
  while (insn)
    {
      if (BARRIER_P (insn))
        {
          if (PREV_INSN (insn))
            SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
          else
            BB_FOOTER (bb) = NEXT_INSN (insn);
          if (NEXT_INSN (insn))
            SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
        }
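      /* A label at this point begins jump table data, which we keep,
         so stop scanning.  */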
      if (LABEL_P (insn))
        return;
      insn = NEXT_INSN (insn);
    }
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block.  The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
         edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
          && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
        fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
        {
          delete_insn_chain (kill_from, BB_END (src), false);
          remove_barriers_from_footer (src);
        }
      else
        delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
                           false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
        return NULL;
      if (dump_file)
        fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
                 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
                          block_label (target), 0))
        {
          gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
          return NULL;
        }
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
        fprintf (dump_file, "Replacing insn %i by jump %i\n",
                 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
         simple jump and remove its associated CODE_LABEL
         and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
        delete_insn_chain (label, table, false);

      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
        emit_barrier_after (BB_END (src));
      else
        {
          if (barrier != NEXT_INSN (BB_END (src)))
            {
              /* Move the jump before the barrier so that the notes which
                 originally were, or were created before, the jump table
                 are inside the basic block.  */
              rtx_insn *new_insn = BB_END (src);

              update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
                                        PREV_INSN (barrier), src);

              SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
              SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

              SET_NEXT_INSN (new_insn) = barrier;
              SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

              SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
              SET_PREV_INSN (barrier) = new_insn;
            }
        }
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this only when it
   originally reached the block that OLD_LABEL heads.  Return true if this
   worked or if the original target wasn't that block; return false if
   redirection doesn't work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        return false;
      vec = table->get_labels ();

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
        if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
          {
            RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
            --LABEL_NUSES (old_label);
            ++LABEL_NUSES (new_label);
          }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
          && SET_DEST (tmp) == pc_rtx
          && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
          && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
          && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
        {
          XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
                                                       new_label);
          --LABEL_NUSES (old_label);
          ++LABEL_NUSES (new_label);
        }
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
        {
          rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
          gcc_assert (GET_CODE (old_ref) == LABEL_REF);
          if (XEXP (old_ref, 0) == old_label)
            {
              ASM_OPERANDS_LABEL (tmp, i)
                = gen_rtx_LABEL_REF (Pmode, new_label);
              --LABEL_NUSES (old_label);
              ++LABEL_NUSES (new_label);
            }
        }

      if (JUMP_LABEL (insn) == old_label)
        {
          JUMP_LABEL (insn) = new_label;
          note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
          if (note)
            remove_note (insn, note);
        }
      else
        {
          note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
          if (note)
            remove_note (insn, note);
          if (JUMP_LABEL (insn) != new_label
              && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
            add_reg_note (insn, REG_LABEL_TARGET, new_label);
        }
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
             != NULL_RTX)
        XEXP (note, 0) = new_label;
    }
  else
    {
      /* ??? We could play games with moving the named labels from
         one basic block to the other in case only one computed jump is
         available.  */
      if (computed_jump_p (insn)
          /* A return instruction can't be redirected.  */
          || returnjump_p (insn))
        return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
        {
          /* If the insn doesn't go where we think, we're confused.  */
          gcc_assert (JUMP_LABEL (insn) == old_label);

          /* If the substitution doesn't succeed, die.  This can happen
             if the back end emitted unrecognizable instructions or if
             the target is the exit block on some arches.  Or for crossing
             jumps.  */
          if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
                              block_label (new_bb), 0))
            {
              gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
                          || CROSSING_JUMP_P (insn));
              return false;
            }
        }
    }
  return true;
}

/* Redirect the edge representing the branch of an (un)conditional jump or
   tablejump.  Return NULL on failure.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of a jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
        return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
                                             old_label, target))
        return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
             e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}

/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src)))
        CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
         src if it exists, and if no other successors are
         still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
        {
          bool has_crossing_succ = false;
          edge e2;
          edge_iterator ei;
          FOR_EACH_EDGE (e2, ei, e->src->succs)
            {
              has_crossing_succ |= (e2->flags & EDGE_CROSSING);
              if (has_crossing_succ)
                break;
            }
          if (!has_crossing_succ)
            CROSSING_JUMP_P (BB_END (e->src)) = 0;
        }
    }
}

/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
         Note that force_nonfallthru will do any necessary partition
         boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
          && BB_PARTITION (bb) != BB_PARTITION (e->dest)
          && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        force_nonfallthru (e);
      else
        fixup_partition_crossing (e);
    }
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do so at the
   expense of adding new instructions or reordering basic blocks.

   This function can also be called with an edge destination equal to
   TARGET; it should then try the simplifications and do nothing if none
   are possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.
   We still return NULL in case E already went to TARGET and we didn't
   manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}

/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
              || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
        {
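          /* Splice the barrier onto the end of the existing footer
             chain, unless the footer already ends in a barrier.  */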
          rtx_insn *footer_tail = BB_FOOTER (bb);

          while (NEXT_INSN (footer_tail))
            footer_tail = NEXT_INSN (footer_tail);
          if (!BARRIER_P (footer_tail))
            {
              SET_NEXT_INSN (footer_tail) = insn;
              SET_PREV_INSN (insn) = footer_tail;
            }
        }
      else
        BB_FOOTER (bb) = insn;
    }
}

/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is a conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect the fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
                                  block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
        {
          int prob = XINT (note, 0);

          b->probability = profile_probability::from_reg_br_prob_note (prob);
          e->probability -= e->probability;
        }
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - a fallthru edge to the same block as the
         abnormal edge.
         We can't redirect the abnormal edge, but we can still split the
         fallthru one and create a separate abnormal edge to the original
         destination.  This allows bb-reorder to make such an edge
         non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          /* We can't redirect the entry block.  Create an empty block
             at the start of the function which we use to add the new
             jump.  */
          edge tmp;
          edge_iterator ei;
          bool found = false;

          basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
                                               ENTRY_BLOCK_PTR_FOR_FN (cfun));
          bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

          /* Make sure new block ends up in correct hot/cold section.  */
          BB_COPY_PARTITION (bb, e->dest);

          /* Change the existing edge's source to be the new block, and add
             a new edge from the entry block to the new block.  */
          e->src = bb;
          for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
               (tmp = ei_safe_edge (ei)); )
            {
              if (tmp == e)
                {
                  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
                  found = true;
                  break;
                }
              else
                ei_next (&ei);
            }

          gcc_assert (found);

          vec_safe_push (bb->succs, e);
          make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
                                 EDGE_FALLTHRU);
        }
    }

  /* If e->src ends with an asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
        {
          if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
            {
              LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
              XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
              LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
              adjust_jump_target = true;
            }
          if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
            asm_goto_edge = true;
        }
      if (adjust_jump_target)
        {
          rtx_insn *insn = BB_END (e->src);
          rtx note;
          rtx_insn *old_label = BB_HEAD (e->dest);
          rtx_insn *new_label = BB_HEAD (target);

          if (JUMP_LABEL (insn) == old_label)
            {
              JUMP_LABEL (insn) = new_label;
              note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
              if (note)
                remove_note (insn, note);
            }
          else
            {
              note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
              if (note)
                remove_note (insn, note);
              if (JUMP_LABEL (insn) != new_label
                  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
                add_reg_note (insn, REG_LABEL_TARGET, new_label);
            }
          while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
                 != NULL_RTX)
            XEXP (note, 0) = new_label;
        }
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      profile_count count = e->count ();
      profile_probability probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
         by searching forward from there.  Otherwise start searching
         forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
        new_head = table;
      else
        new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = profile_probability::always ();

      /* If e->src was previously region crossing, it no longer is
         and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
         add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
        {
          new_edge->probability = new_edge->probability.apply_scale (1, 2);
          jump_block->count = jump_block->count.apply_scale (1, 2);
          edge new_edge2 = make_edge (new_edge->src, target,
                                      e->flags & ~EDGE_FALLTHRU);
          new_edge2->probability = probability - new_edge->probability;
        }

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (jump_label == ret_rtx)
        emit_jump_insn_after_setloc (targetm.gen_return (),
                                     BB_END (jump_block), loc);
      else
        {
          gcc_assert (jump_label == simple_return_rtx);
          emit_jump_insn_after_setloc (targetm.gen_simple_return (),
                                       BB_END (jump_block), loc);
        }
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx_code_label *label = block_label (target);
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
                                   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}

/* Edge E is assumed to be a fallthru edge.  Emit the needed jump
   instruction (and possibly create a new basic block) to make the edge
   non-fallthru.  Return the newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}

/* Redirect edge even at the expense of creating a new jump insn or
   basic block.  Return the new basic block if created, NULL otherwise.
   Conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect the newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target, NULL_RTX);
}
1743
1744 /* The given edge should potentially be a fallthru edge. If that is in
1745 fact true, delete the jump and barriers that are in the way. */
1746
1747 static void
rtl_tidy_fallthru_edge(edge e)1748 rtl_tidy_fallthru_edge (edge e)
1749 {
1750 rtx_insn *q;
1751 basic_block b = e->src, c = b->next_bb;
1752
1753 /* ??? In a late-running flow pass, other folks may have deleted basic
1754 blocks by nopping out blocks, leaving multiple BARRIERs between here
1755 and the target label. They ought to be chastised and fixed.
1756
1757 We can also wind up with a sequence of undeletable labels between
1758 one block and the next.
1759
1760 So search through a sequence of barriers, labels, and notes for
1761 the head of block C and assert that we really do fall through. */
1762
1763 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1764 if (NONDEBUG_INSN_P (q))
1765 return;
1766
1767 /* Remove what will soon cease being the jump insn from the source block.
1768 If block B consisted only of this single jump, turn it into a deleted
1769 note. */
1770 q = BB_END (b);
1771 if (JUMP_P (q)
1772 && onlyjump_p (q)
1773 && (any_uncondjump_p (q)
1774 || single_succ_p (b)))
1775 {
1776 rtx_insn *label;
1777 rtx_jump_table_data *table;
1778
1779 if (tablejump_p (q, &label, &table))
1780 {
1781 /* The label is likely mentioned in some instruction before
1782 the tablejump and might not be DCEd, so turn it into
1783 a note instead and move before the tablejump that is going to
1784 be deleted. */
1785 const char *name = LABEL_NAME (label);
1786 PUT_CODE (label, NOTE);
1787 NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
1788 NOTE_DELETED_LABEL_NAME (label) = name;
1789 reorder_insns (label, label, PREV_INSN (q));
1790 delete_insn (table);
1791 }
1792
1793 /* If this was a conditional jump, we need to also delete
1794 the insn that set cc0. */
1795 if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1796 q = PREV_INSN (q);
1797
1798 q = PREV_INSN (q);
1799 }
1800 /* Unconditional jumps with side-effects (i.e. which we can't just delete
1801 together with the barrier) should never have a fallthru edge. */
1802 else if (JUMP_P (q) && any_uncondjump_p (q))
1803 return;
1804
1805 /* Selectively unlink the sequence. */
1806 if (q != PREV_INSN (BB_HEAD (c)))
1807 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1808
1809 e->flags |= EDGE_FALLTHRU;
1810 }
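
/* Illustration (assumed insn stream, not from this file): before the
   tidying, blocks B and C might look like

	B:  ...  jump L1
	    barrier
	C:  L1:  bb note  ...

   Deleting the jump and the barrier lets B fall straight through into C,
   after which the edge is marked EDGE_FALLTHRU.  */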
1811
1812 /* Move basic block BB after basic block AFTER.  Not implemented yet.  */
1813
1814 static bool
1815 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1816 basic_block after ATTRIBUTE_UNUSED)
1817 {
1818 return false;
1819 }
1820
1821 /* Locate the last bb in the same partition as START_BB. */
1822
1823 static basic_block
1824 last_bb_in_partition (basic_block start_bb)
1825 {
1826 basic_block bb;
1827 FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1828 {
1829 if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1830 return bb;
1831 }
1832 /* Return bb before the exit block. */
1833 return bb->prev_bb;
1834 }
1835
1836 /* Split a (typically critical) edge. Return the new block.
1837 The edge must not be abnormal.
1838
1839 ??? The code generally expects to be called on critical edges.
1840 The case of a block ending in an unconditional jump to a
1841 block with multiple predecessors is not handled optimally. */
1842
1843 static basic_block
1844 rtl_split_edge (edge edge_in)
1845 {
1846 basic_block bb, new_bb;
1847 rtx_insn *before;
1848
1849 /* Abnormal edges cannot be split. */
1850 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1851
1852 /* We are going to place the new block in front of edge destination.
1853 Avoid existence of fallthru predecessors. */
1854 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1855 {
1856 edge e = find_fallthru_edge (edge_in->dest->preds);
1857
1858 if (e)
1859 force_nonfallthru (e);
1860 }
1861
1862 /* Create the basic block note. */
1863 if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1864 before = BB_HEAD (edge_in->dest);
1865 else
1866 before = NULL;
1867
1868   /* If this is a fall through edge to the exit block, the blocks might
1869      not be adjacent, and the right place is after the source.  */
1870 if ((edge_in->flags & EDGE_FALLTHRU)
1871 && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1872 {
1873 before = NEXT_INSN (BB_END (edge_in->src));
1874 bb = create_basic_block (before, NULL, edge_in->src);
1875 BB_COPY_PARTITION (bb, edge_in->src);
1876 }
1877 else
1878 {
1879 if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1880 {
1881 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1882 BB_COPY_PARTITION (bb, edge_in->dest);
1883 }
1884 else
1885 {
1886 basic_block after = edge_in->dest->prev_bb;
1887 /* If this is post-bb reordering, and the edge crosses a partition
1888 boundary, the new block needs to be inserted in the bb chain
1889 at the end of the src partition (since we put the new bb into
1890 that partition, see below). Otherwise we may end up creating
1891 an extra partition crossing in the chain, which is illegal.
1892 It can't go after the src, because src may have a fall-through
1893 to a different block. */
1894 if (crtl->bb_reorder_complete
1895 && (edge_in->flags & EDGE_CROSSING))
1896 {
1897 after = last_bb_in_partition (edge_in->src);
1898 before = get_last_bb_insn (after);
1899 /* The instruction following the last bb in partition should
1900 be a barrier, since it cannot end in a fall-through. */
1901 gcc_checking_assert (BARRIER_P (before));
1902 before = NEXT_INSN (before);
1903 }
1904 bb = create_basic_block (before, NULL, after);
1905 /* Put the split bb into the src partition, to avoid creating
1906 a situation where a cold bb dominates a hot bb, in the case
1907 where src is cold and dest is hot. The src will dominate
1908 the new bb (whereas it might not have dominated dest). */
1909 BB_COPY_PARTITION (bb, edge_in->src);
1910 }
1911 }
1912
1913 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1914
1915 /* Can't allow a region crossing edge to be fallthrough. */
1916 if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1917 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1918 {
1919 new_bb = force_nonfallthru (single_succ_edge (bb));
1920 gcc_assert (!new_bb);
1921 }
1922
1923 /* For non-fallthru edges, we must adjust the predecessor's
1924 jump instruction to target our new block. */
1925 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1926 {
1927 edge redirected = redirect_edge_and_branch (edge_in, bb);
1928 gcc_assert (redirected);
1929 }
1930 else
1931 {
1932 if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1933 {
1934 /* For asm goto even splitting of fallthru edge might
1935 need insn patching, as other labels might point to the
1936 old label. */
1937 rtx_insn *last = BB_END (edge_in->src);
1938 if (last
1939 && JUMP_P (last)
1940 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1941 && (extract_asm_operands (PATTERN (last))
1942 || JUMP_LABEL (last) == before)
1943 && patch_jump_insn (last, before, bb))
1944 df_set_bb_dirty (edge_in->src);
1945 }
1946 redirect_edge_succ (edge_in, bb);
1947 }
1948
1949 return bb;
1950 }
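
/* Illustration (hypothetical CFG): splitting the critical edge B1->B2,
   where B1 has two successors and B2 has two predecessors, yields

	B1 --jump--> N --fallthru--> B2

   with N a fresh block placed in B1's partition.  Compensation code can
   then be inserted into N without disturbing B2's other predecessor.  */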
1951
1952 /* Queue instructions for insertion on an edge between two basic blocks.
1953 The new instructions and basic blocks (if any) will not appear in the
1954 CFG until commit_edge_insertions is called. */
1955
1956 void
1957 insert_insn_on_edge (rtx pattern, edge e)
1958 {
1959 /* We cannot insert instructions on an abnormal critical edge.
1960 It will be easier to find the culprit if we die now. */
1961 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1962
1963 if (e->insns.r == NULL_RTX)
1964 start_sequence ();
1965 else
1966 push_to_sequence (e->insns.r);
1967
1968 emit_insn (pattern);
1969
1970 e->insns.r = get_insns ();
1971 end_sequence ();
1972 }
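
/* A minimal usage sketch (hypothetical caller, not part of this file):
   queue an insn on every successor edge of BB, then materialize all the
   queued insertions once the pass has finished mutating the CFG:

	edge e;
	edge_iterator ei;
	FOR_EACH_EDGE (e, ei, bb->succs)
	  insert_insn_on_edge (gen_rtx_SET (reg, const0_rtx), e);
	...
	commit_edge_insertions ();

   Here REG stands for an arbitrary pseudo.  commit_edge_insertions may
   split edges and create new basic blocks, which is why the insns stay
   queued on the edge until it is called.  */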
1973
1974 /* Update the CFG for the instructions queued on edge E. */
1975
1976 void
1977 commit_one_edge_insertion (edge e)
1978 {
1979 rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
1980 basic_block bb;
1981
1982 /* Pull the insns off the edge now since the edge might go away. */
1983 insns = e->insns.r;
1984 e->insns.r = NULL;
1985
1986 /* Figure out where to put these insns. If the destination has
1987 one predecessor, insert there. Except for the exit block. */
1988 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1989 {
1990 bb = e->dest;
1991
1992 /* Get the location correct wrt a code label, and "nice" wrt
1993 a basic block note, and before everything else. */
1994 tmp = BB_HEAD (bb);
1995 if (LABEL_P (tmp))
1996 tmp = NEXT_INSN (tmp);
1997 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1998 tmp = NEXT_INSN (tmp);
1999 if (tmp == BB_HEAD (bb))
2000 before = tmp;
2001 else if (tmp)
2002 after = PREV_INSN (tmp);
2003 else
2004 after = get_last_insn ();
2005 }
2006
2007 /* If the source has one successor and the edge is not abnormal,
2008 insert there. Except for the entry block.
2009 Don't do this if the predecessor ends in a jump other than
2010 unconditional simple jump. E.g. for asm goto that points all
2011 its labels at the fallthru basic block, we can't insert instructions
2012      before the asm goto, as the asm goto can have various side effects,
2013 and can't emit instructions after the asm goto, as it must end
2014 the basic block. */
2015 else if ((e->flags & EDGE_ABNORMAL) == 0
2016 && single_succ_p (e->src)
2017 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2018 && (!JUMP_P (BB_END (e->src))
2019 || simplejump_p (BB_END (e->src))))
2020 {
2021 bb = e->src;
2022
2023 /* It is possible to have a non-simple jump here. Consider a target
2024 where some forms of unconditional jumps clobber a register. This
2025 happens on the fr30 for example.
2026
2027 We know this block has a single successor, so we can just emit
2028 the queued insns before the jump. */
2029 if (JUMP_P (BB_END (bb)))
2030 before = BB_END (bb);
2031 else
2032 {
2033 /* We'd better be fallthru, or we've lost track of what's what. */
2034 gcc_assert (e->flags & EDGE_FALLTHRU);
2035
2036 after = BB_END (bb);
2037 }
2038 }
2039
2040 /* Otherwise we must split the edge. */
2041 else
2042 {
2043 bb = split_edge (e);
2044
2045 /* If E crossed a partition boundary, we needed to make bb end in
2046 a region-crossing jump, even though it was originally fallthru. */
2047 if (JUMP_P (BB_END (bb)))
2048 before = BB_END (bb);
2049 else
2050 after = BB_END (bb);
2051 }
2052
2053 /* Now that we've found the spot, do the insertion. */
2054 if (before)
2055 {
2056 emit_insn_before_noloc (insns, before, bb);
2057 last = prev_nonnote_insn (before);
2058 }
2059 else
2060 last = emit_insn_after_noloc (insns, after, bb);
2061
2062 if (returnjump_p (last))
2063 {
2064 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2065 This is not currently a problem because this only happens
2066 for the (single) epilogue, which already has a fallthru edge
2067 to EXIT. */
2068
2069 e = single_succ_edge (bb);
2070 gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2071 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2072
2073 e->flags &= ~EDGE_FALLTHRU;
2074 emit_barrier_after (last);
2075
2076 if (before)
2077 delete_insn (before);
2078 }
2079 else
2080 gcc_assert (!JUMP_P (last));
2081 }
2082
2083 /* Update the CFG for all queued instructions. */
2084
2085 void
2086 commit_edge_insertions (void)
2087 {
2088 basic_block bb;
2089
2090 /* Optimization passes that invoke this routine can cause hot blocks
2091 previously reached by both hot and cold blocks to become dominated only
2092 by cold blocks. This will cause the verification below to fail,
2093      and leave now-cold code in the hot section.  In some cases this
2094 may only be visible after newly unreachable blocks are deleted,
2095 which will be done by fixup_partitions. */
2096 fixup_partitions ();
2097
2098 checking_verify_flow_info ();
2099
2100 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2101 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2102 {
2103 edge e;
2104 edge_iterator ei;
2105
2106 FOR_EACH_EDGE (e, ei, bb->succs)
2107 if (e->insns.r)
2108 commit_one_edge_insertion (e);
2109 }
2110 }
2111
2112
2113 /* Print out RTL-specific basic block information (live information
2114 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2115 documented in dumpfile.h. */
2116
2117 static void
2118 rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
2119 {
2120 char *s_indent;
2121
2122 s_indent = (char *) alloca ((size_t) indent + 1);
2123 memset (s_indent, ' ', (size_t) indent);
2124 s_indent[indent] = '\0';
2125
2126 if (df && (flags & TDF_DETAILS))
2127 {
2128 df_dump_top (bb, outf);
2129 putc ('\n', outf);
2130 }
2131
2132 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2133 {
2134 rtx_insn *last = BB_END (bb);
2135 if (last)
2136 last = NEXT_INSN (last);
2137 for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
2138 {
2139 if (flags & TDF_DETAILS)
2140 df_dump_insn_top (insn, outf);
2141 if (! (flags & TDF_SLIM))
2142 print_rtl_single (outf, insn);
2143 else
2144 dump_insn_slim (outf, insn);
2145 if (flags & TDF_DETAILS)
2146 df_dump_insn_bottom (insn, outf);
2147 }
2148 }
2149
2150 if (df && (flags & TDF_DETAILS))
2151 {
2152 df_dump_bottom (bb, outf);
2153 putc ('\n', outf);
2154 }
2155
2156 }
2157
2158 /* Like dump_function_to_file, but for RTL. Print out dataflow information
2159 for the start of each basic block. FLAGS are the TDF_* masks documented
2160 in dumpfile.h. */
2161
2162 void
2163 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
2164 {
2165 const rtx_insn *tmp_rtx;
2166 if (rtx_first == 0)
2167 fprintf (outf, "(nil)\n");
2168 else
2169 {
2170 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2171 int max_uid = get_max_uid ();
2172 basic_block *start = XCNEWVEC (basic_block, max_uid);
2173 basic_block *end = XCNEWVEC (basic_block, max_uid);
2174 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2175 basic_block bb;
2176
2177 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2178 insns, but the CFG is not maintained so the basic block info
2179 is not reliable. Therefore it's omitted from the dumps. */
2180 if (! (cfun->curr_properties & PROP_cfg))
2181 flags &= ~TDF_BLOCKS;
2182
2183 if (df)
2184 df_dump_start (outf);
2185
2186 if (flags & TDF_BLOCKS)
2187 {
2188 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2189 {
2190 rtx_insn *x;
2191
2192 start[INSN_UID (BB_HEAD (bb))] = bb;
2193 end[INSN_UID (BB_END (bb))] = bb;
2194 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2195 {
2196 enum bb_state state = IN_MULTIPLE_BB;
2197
2198 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2199 state = IN_ONE_BB;
2200 in_bb_p[INSN_UID (x)] = state;
2201
2202 if (x == BB_END (bb))
2203 break;
2204 }
2205 }
2206 }
2207
2208 for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
2209 {
2210 if (flags & TDF_BLOCKS)
2211 {
2212 bb = start[INSN_UID (tmp_rtx)];
2213 if (bb != NULL)
2214 {
2215 dump_bb_info (outf, bb, 0, dump_flags, true, false);
2216 if (df && (flags & TDF_DETAILS))
2217 df_dump_top (bb, outf);
2218 }
2219
2220 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2221 && !NOTE_P (tmp_rtx)
2222 && !BARRIER_P (tmp_rtx))
2223 fprintf (outf, ";; Insn is not within a basic block\n");
2224 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2225 fprintf (outf, ";; Insn is in multiple basic blocks\n");
2226 }
2227
2228 if (flags & TDF_DETAILS)
2229 df_dump_insn_top (tmp_rtx, outf);
2230 if (! (flags & TDF_SLIM))
2231 print_rtl_single (outf, tmp_rtx);
2232 else
2233 dump_insn_slim (outf, tmp_rtx);
2234 if (flags & TDF_DETAILS)
2235 df_dump_insn_bottom (tmp_rtx, outf);
2236
2237 if (flags & TDF_BLOCKS)
2238 {
2239 bb = end[INSN_UID (tmp_rtx)];
2240 if (bb != NULL)
2241 {
2242 dump_bb_info (outf, bb, 0, dump_flags, false, true);
2243 if (df && (flags & TDF_DETAILS))
2244 df_dump_bottom (bb, outf);
2245 putc ('\n', outf);
2246 }
2247 }
2248 }
2249
2250 free (start);
2251 free (end);
2252 free (in_bb_p);
2253 }
2254 }
2255
2256 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2257
2258 void
2259 update_br_prob_note (basic_block bb)
2260 {
2261 rtx note;
2262 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2263 if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2264 {
2265 if (note)
2266 {
2267 rtx *note_link, this_rtx;
2268
2269 	  note_link = &REG_NOTES (BB_END (bb));
2270 for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2271 if (this_rtx == note)
2272 {
2273 *note_link = XEXP (this_rtx, 1);
2274 break;
2275 }
2276 }
2277 return;
2278 }
2279 if (!note
2280 || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2281 return;
2282 XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
2283 }
2284
2285 /* Get the last insn associated with block BB (that includes barriers and
2286 tablejumps after BB). */
2287 rtx_insn *
2288 get_last_bb_insn (basic_block bb)
2289 {
2290 rtx_jump_table_data *table;
2291 rtx_insn *tmp;
2292 rtx_insn *end = BB_END (bb);
2293
2294 /* Include any jump table following the basic block. */
2295 if (tablejump_p (end, NULL, &table))
2296 end = table;
2297
2298 /* Include any barriers that may follow the basic block. */
2299 tmp = next_nonnote_nondebug_insn_bb (end);
2300 while (tmp && BARRIER_P (tmp))
2301 {
2302 end = tmp;
2303 tmp = next_nonnote_nondebug_insn_bb (end);
2304 }
2305
2306 return end;
2307 }
2308
2309 /* Add all BBs reachable from entry via hot paths into the SET. */
2310
2311 void
2312 find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2313 {
2314 auto_vec<basic_block, 64> worklist;
2315
2316 set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2317 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2318
2319 while (worklist.length () > 0)
2320 {
2321 basic_block bb = worklist.pop ();
2322 edge_iterator ei;
2323 edge e;
2324
2325 FOR_EACH_EDGE (e, ei, bb->succs)
2326 if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2327 && !set->add (e->dest))
2328 worklist.safe_push (e->dest);
2329 }
2330 }
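
/* Example (hypothetical): test whether BB is reachable from the entry
   block without passing through the cold section; find_partition_fixes
   below uses exactly this query:

	hash_set<basic_block> hot_set;
	find_bbs_reachable_by_hot_paths (&hot_set);
	bool cold_only = !hot_set.contains (bb);  */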
2331
2332 /* Sanity check partition hotness to ensure that basic blocks in
2333 the cold partition don't dominate basic blocks in the hot partition.
2334 If FLAG_ONLY is true, report violations as errors. Otherwise
2335 re-mark the dominated blocks as cold, since this is run after
2336 cfg optimizations that may make hot blocks previously reached
2337 by both hot and cold blocks now only reachable along cold paths. */
2338
2339 static vec<basic_block>
2340 find_partition_fixes (bool flag_only)
2341 {
2342 basic_block bb;
2343 vec<basic_block> bbs_in_cold_partition = vNULL;
2344 vec<basic_block> bbs_to_fix = vNULL;
2345 hash_set<basic_block> set;
2346
2347 /* Callers check this. */
2348 gcc_checking_assert (crtl->has_bb_partition);
2349
2350 find_bbs_reachable_by_hot_paths (&set);
2351
2352 FOR_EACH_BB_FN (bb, cfun)
2353 if (!set.contains (bb)
2354 && BB_PARTITION (bb) != BB_COLD_PARTITION)
2355 {
2356 if (flag_only)
2357 error ("non-cold basic block %d reachable only "
2358 "by paths crossing the cold partition", bb->index);
2359 else
2360 BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2361 bbs_to_fix.safe_push (bb);
2362 bbs_in_cold_partition.safe_push (bb);
2363 }
2364
2365 return bbs_to_fix;
2366 }
2367
2368 /* Perform cleanup on the hot/cold bb partitioning after optimization
2369 passes that modify the cfg. */
2370
2371 void
2372 fixup_partitions (void)
2373 {
2374 basic_block bb;
2375
2376 if (!crtl->has_bb_partition)
2377 return;
2378
2379 /* Delete any blocks that became unreachable and weren't
2380 already cleaned up, for example during edge forwarding
2381 and convert_jumps_to_returns. This will expose more
2382 opportunities for fixing the partition boundaries here.
2383 Also, the calculation of the dominance graph during verification
2384 will assert if there are unreachable nodes. */
2385 delete_unreachable_blocks ();
2386
2387 /* If there are partitions, do a sanity check on them: A basic block in
2388 a cold partition cannot dominate a basic block in a hot partition.
2389 Fixup any that now violate this requirement, as a result of edge
2390 forwarding and unreachable block deletion. */
2391 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2392
2393 /* Do the partition fixup after all necessary blocks have been converted to
2394 cold, so that we only update the region crossings the minimum number of
2395      places, which can require forcing edges to be non-fallthru.  */
2396 while (! bbs_to_fix.is_empty ())
2397 {
2398 bb = bbs_to_fix.pop ();
2399 fixup_new_cold_bb (bb);
2400 }
2401 }
2402
2403 /* Verify, in the basic block chain, that there is at most one switch
2404 between hot/cold partitions. This condition will not be true until
2405 after reorder_basic_blocks is called. */
2406
2407 static int
2408 verify_hot_cold_block_grouping (void)
2409 {
2410 basic_block bb;
2411 int err = 0;
2412 bool switched_sections = false;
2413 int current_partition = BB_UNPARTITIONED;
2414
2415 /* Even after bb reordering is complete, we go into cfglayout mode
2416 again (in compgoto). Ensure we don't call this before going back
2417 into linearized RTL when any layout fixes would have been committed. */
2418 if (!crtl->bb_reorder_complete
2419 || current_ir_type () != IR_RTL_CFGRTL)
2420 return err;
2421
2422 FOR_EACH_BB_FN (bb, cfun)
2423 {
2424 if (current_partition != BB_UNPARTITIONED
2425 && BB_PARTITION (bb) != current_partition)
2426 {
2427 if (switched_sections)
2428 {
2429 error ("multiple hot/cold transitions found (bb %i)",
2430 bb->index);
2431 err = 1;
2432 }
2433 else
2434 switched_sections = true;
2435
2436 if (!crtl->has_bb_partition)
2437 error ("partition found but function partition flag not set");
2438 }
2439 current_partition = BB_PARTITION (bb);
2440 }
2441
2442 return err;
2443 }
2444
2445
2446 /* Perform several checks on the edges out of each block, such as
2447 the consistency of the branch probabilities, the correctness
2448 of hot/cold partition crossing edges, and the number of expected
2449 successor edges. Also verify that the dominance relationship
2450 between hot/cold blocks is sane. */
2451
2452 static int
2453 rtl_verify_edges (void)
2454 {
2455 int err = 0;
2456 basic_block bb;
2457
2458 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2459 {
2460 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2461 int n_eh = 0, n_abnormal = 0;
2462 edge e, fallthru = NULL;
2463 edge_iterator ei;
2464 rtx note;
2465 bool has_crossing_edge = false;
2466
2467 if (JUMP_P (BB_END (bb))
2468 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2469 && EDGE_COUNT (bb->succs) >= 2
2470 && any_condjump_p (BB_END (bb)))
2471 {
2472 if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2473 {
2474 if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
2475 {
2476 error ("verify_flow_info: "
2477 "REG_BR_PROB is set but cfg probability is not");
2478 err = 1;
2479 }
2480 }
2481 else if (XINT (note, 0)
2482 != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
2483 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2484 {
2485 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2486 XINT (note, 0),
2487 BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
2488 err = 1;
2489 }
2490 }
2491
2492 FOR_EACH_EDGE (e, ei, bb->succs)
2493 {
2494 bool is_crossing;
2495
2496 if (e->flags & EDGE_FALLTHRU)
2497 n_fallthru++, fallthru = e;
2498
2499 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2500 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2501 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2502 has_crossing_edge |= is_crossing;
2503 if (e->flags & EDGE_CROSSING)
2504 {
2505 if (!is_crossing)
2506 {
2507 error ("EDGE_CROSSING incorrectly set across same section");
2508 err = 1;
2509 }
2510 if (e->flags & EDGE_FALLTHRU)
2511 {
2512 error ("fallthru edge crosses section boundary in bb %i",
2513 e->src->index);
2514 err = 1;
2515 }
2516 if (e->flags & EDGE_EH)
2517 {
2518 error ("EH edge crosses section boundary in bb %i",
2519 e->src->index);
2520 err = 1;
2521 }
2522 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2523 {
2524 error ("No region crossing jump at section boundary in bb %i",
2525 bb->index);
2526 err = 1;
2527 }
2528 }
2529 else if (is_crossing)
2530 {
2531 error ("EDGE_CROSSING missing across section boundary");
2532 err = 1;
2533 }
2534
2535 if ((e->flags & ~(EDGE_DFS_BACK
2536 | EDGE_CAN_FALLTHRU
2537 | EDGE_IRREDUCIBLE_LOOP
2538 | EDGE_LOOP_EXIT
2539 | EDGE_CROSSING
2540 | EDGE_PRESERVE)) == 0)
2541 n_branch++;
2542
2543 if (e->flags & EDGE_ABNORMAL_CALL)
2544 n_abnormal_call++;
2545
2546 if (e->flags & EDGE_SIBCALL)
2547 n_sibcall++;
2548
2549 if (e->flags & EDGE_EH)
2550 n_eh++;
2551
2552 if (e->flags & EDGE_ABNORMAL)
2553 n_abnormal++;
2554 }
2555
2556 if (!has_crossing_edge
2557 && JUMP_P (BB_END (bb))
2558 && CROSSING_JUMP_P (BB_END (bb)))
2559 {
2560 print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
2561 error ("Region crossing jump across same section in bb %i",
2562 bb->index);
2563 err = 1;
2564 }
2565
2566 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2567 {
2568 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2569 err = 1;
2570 }
2571 if (n_eh > 1)
2572 {
2573 error ("too many exception handling edges in bb %i", bb->index);
2574 err = 1;
2575 }
2576 if (n_branch
2577 && (!JUMP_P (BB_END (bb))
2578 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2579 || any_condjump_p (BB_END (bb))))))
2580 {
2581 error ("too many outgoing branch edges from bb %i", bb->index);
2582 err = 1;
2583 }
2584 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2585 {
2586 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2587 err = 1;
2588 }
2589 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2590 {
2591 error ("wrong number of branch edges after unconditional jump"
2592 " in bb %i", bb->index);
2593 err = 1;
2594 }
2595 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2596 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2597 {
2598 error ("wrong amount of branch edges after conditional jump"
2599 " in bb %i", bb->index);
2600 err = 1;
2601 }
2602 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2603 {
2604 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2605 err = 1;
2606 }
2607 if (n_sibcall && !CALL_P (BB_END (bb)))
2608 {
2609 error ("sibcall edges for non-call insn in bb %i", bb->index);
2610 err = 1;
2611 }
2612 if (n_abnormal > n_eh
2613 && !(CALL_P (BB_END (bb))
2614 && n_abnormal == n_abnormal_call + n_sibcall)
2615 && (!JUMP_P (BB_END (bb))
2616 || any_condjump_p (BB_END (bb))
2617 || any_uncondjump_p (BB_END (bb))))
2618 {
2619 error ("abnormal edges for no purpose in bb %i", bb->index);
2620 err = 1;
2621 }
2622
2623 int has_eh = -1;
2624 FOR_EACH_EDGE (e, ei, bb->preds)
2625 {
2626 if (has_eh == -1)
2627 has_eh = (e->flags & EDGE_EH);
2628 if ((e->flags & EDGE_EH) == has_eh)
2629 continue;
2630 error ("EH incoming edge mixed with non-EH incoming edges "
2631 "in bb %i", bb->index);
2632 err = 1;
2633 break;
2634 }
2635 }
2636
2637 /* If there are partitions, do a sanity check on them: A basic block in
2638 a cold partition cannot dominate a basic block in a hot partition. */
2639 if (crtl->has_bb_partition && !err
2640 && current_ir_type () == IR_RTL_CFGLAYOUT)
2641 {
2642 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2643 err = !bbs_to_fix.is_empty ();
2644 }
2645
2646 /* Clean up. */
2647 return err;
2648 }
2649
2650 /* Checks on the instructions within blocks. Currently checks that each
2651 block starts with a basic block note, and that basic block notes and
2652 control flow jumps are not found in the middle of the block. */
2653
2654 static int
2655 rtl_verify_bb_insns (void)
2656 {
2657 rtx_insn *x;
2658 int err = 0;
2659 basic_block bb;
2660
2661 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2662 {
2663       /* Now check the header of the basic block.  It ought to
2664 	 contain an optional CODE_LABEL followed by
2665 	 a NOTE_BASIC_BLOCK.  */
2666 x = BB_HEAD (bb);
2667 if (LABEL_P (x))
2668 {
2669 if (BB_END (bb) == x)
2670 {
2671 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2672 bb->index);
2673 err = 1;
2674 }
2675
2676 x = NEXT_INSN (x);
2677 }
2678
2679 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2680 {
2681 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2682 bb->index);
2683 err = 1;
2684 }
2685
2686 if (BB_END (bb) == x)
2687 /* Do checks for empty blocks here. */
2688 ;
2689 else
2690 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2691 {
2692 if (NOTE_INSN_BASIC_BLOCK_P (x))
2693 {
2694 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2695 INSN_UID (x), bb->index);
2696 err = 1;
2697 }
2698
2699 if (x == BB_END (bb))
2700 break;
2701
2702 if (control_flow_insn_p (x))
2703 {
2704 error ("in basic block %d:", bb->index);
2705 fatal_insn ("flow control insn inside a basic block", x);
2706 }
2707 }
2708 }
2709
2710 /* Clean up. */
2711 return err;
2712 }
2713
2714 /* Verify that block pointers for instructions in basic blocks, headers and
2715 footers are set appropriately. */
2716
2717 static int
2718 rtl_verify_bb_pointers (void)
2719 {
2720 int err = 0;
2721 basic_block bb;
2722
2723 /* Check the general integrity of the basic blocks. */
2724 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2725 {
2726 rtx_insn *insn;
2727
2728 if (!(bb->flags & BB_RTL))
2729 {
2730 error ("BB_RTL flag not set for block %d", bb->index);
2731 err = 1;
2732 }
2733
2734 FOR_BB_INSNS (bb, insn)
2735 if (BLOCK_FOR_INSN (insn) != bb)
2736 {
2737 error ("insn %d basic block pointer is %d, should be %d",
2738 INSN_UID (insn),
2739 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2740 bb->index);
2741 err = 1;
2742 }
2743
2744 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2745 if (!BARRIER_P (insn)
2746 && BLOCK_FOR_INSN (insn) != NULL)
2747 {
2748 error ("insn %d in header of bb %d has non-NULL basic block",
2749 INSN_UID (insn), bb->index);
2750 err = 1;
2751 }
2752 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2753 if (!BARRIER_P (insn)
2754 && BLOCK_FOR_INSN (insn) != NULL)
2755 {
2756 error ("insn %d in footer of bb %d has non-NULL basic block",
2757 INSN_UID (insn), bb->index);
2758 err = 1;
2759 }
2760 }
2761
2762 /* Clean up. */
2763 return err;
2764 }
2765
2766 /* Verify the CFG and RTL consistency common for both underlying RTL and
2767 cfglayout RTL.
2768
2769    Currently it does the following checks:
2770
2771 - overlapping of basic blocks
2772 - insns with wrong BLOCK_FOR_INSN pointers
2773 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2774 - tails of basic blocks (ensure that boundary is necessary)
2775 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2776 and NOTE_INSN_BASIC_BLOCK
2777    - verify that no fallthru edge crosses hot/cold partition boundaries
2778 - verify that there are no pending RTL branch predictions
2779 - verify that hot blocks are not dominated by cold blocks
2780
2781    In the future it can be extended to check a lot of other stuff as well
2782    (reachability of basic blocks, liveness information, etc.).  */
2783
2784 static int
2785 rtl_verify_flow_info_1 (void)
2786 {
2787 int err = 0;
2788
2789 err |= rtl_verify_bb_pointers ();
2790
2791 err |= rtl_verify_bb_insns ();
2792
2793 err |= rtl_verify_edges ();
2794
2795 return err;
2796 }
2797
2798 /* Walk the instruction chain and verify that bb head/end pointers
2799 are correct, and that instructions are in exactly one bb and have
2800 correct block pointers. */
2801
2802 static int
2803 rtl_verify_bb_insn_chain (void)
2804 {
2805 basic_block bb;
2806 int err = 0;
2807 rtx_insn *x;
2808 rtx_insn *last_head = get_last_insn ();
2809 basic_block *bb_info;
2810 const int max_uid = get_max_uid ();
2811
2812 bb_info = XCNEWVEC (basic_block, max_uid);
2813
2814 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2815 {
2816 rtx_insn *head = BB_HEAD (bb);
2817 rtx_insn *end = BB_END (bb);
2818
2819 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2820 {
2821 /* Verify the end of the basic block is in the INSN chain. */
2822 if (x == end)
2823 break;
2824
2825 /* And that the code outside of basic blocks has NULL bb field. */
2826 if (!BARRIER_P (x)
2827 && BLOCK_FOR_INSN (x) != NULL)
2828 {
2829 error ("insn %d outside of basic blocks has non-NULL bb field",
2830 INSN_UID (x));
2831 err = 1;
2832 }
2833 }
2834
2835 if (!x)
2836 {
2837 error ("end insn %d for block %d not found in the insn stream",
2838 INSN_UID (end), bb->index);
2839 err = 1;
2840 }
2841
2842 /* Work backwards from the end to the head of the basic block
2843 to verify the head is in the RTL chain. */
2844 for (; x != NULL_RTX; x = PREV_INSN (x))
2845 {
2846 /* While walking over the insn chain, verify insns appear
2847 in only one basic block. */
2848 if (bb_info[INSN_UID (x)] != NULL)
2849 {
2850 error ("insn %d is in multiple basic blocks (%d and %d)",
2851 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2852 err = 1;
2853 }
2854
2855 bb_info[INSN_UID (x)] = bb;
2856
2857 if (x == head)
2858 break;
2859 }
2860 if (!x)
2861 {
2862 error ("head insn %d for block %d not found in the insn stream",
2863 INSN_UID (head), bb->index);
2864 err = 1;
2865 }
2866
2867 last_head = PREV_INSN (x);
2868 }
2869
2870 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2871 {
2872 /* Check that the code before the first basic block has NULL
2873 bb field. */
2874 if (!BARRIER_P (x)
2875 && BLOCK_FOR_INSN (x) != NULL)
2876 {
2877 error ("insn %d outside of basic blocks has non-NULL bb field",
2878 INSN_UID (x));
2879 err = 1;
2880 }
2881 }
2882 free (bb_info);
2883
2884 return err;
2885 }
2886
2887 /* Verify that fallthru edges point to adjacent blocks in layout order and
2888 that barriers exist after non-fallthru blocks. */
2889
2890 static int
2891 rtl_verify_fallthru (void)
2892 {
2893 basic_block bb;
2894 int err = 0;
2895
2896 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2897 {
2898 edge e;
2899
2900 e = find_fallthru_edge (bb->succs);
2901 if (!e)
2902 {
2903 rtx_insn *insn;
2904
2905 /* Ensure existence of barrier in BB with no fallthru edges. */
2906 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2907 {
2908 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2909 {
2910 error ("missing barrier after block %i", bb->index);
2911 err = 1;
2912 break;
2913 }
2914 if (BARRIER_P (insn))
2915 break;
2916 }
2917 }
2918 else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2919 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2920 {
2921 rtx_insn *insn;
2922
2923 if (e->src->next_bb != e->dest)
2924 {
2925 error
2926 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2927 e->src->index, e->dest->index);
2928 err = 1;
2929 }
2930 else
2931 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2932 insn = NEXT_INSN (insn))
2933 if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
2934 {
2935 error ("verify_flow_info: Incorrect fallthru %i->%i",
2936 e->src->index, e->dest->index);
2937 fatal_insn ("wrong insn in the fallthru edge", insn);
2938 err = 1;
2939 }
2940 }
2941 }
2942
2943 return err;
2944 }
2945
2946 /* Verify that blocks are laid out in consecutive order. While walking the
2947 instructions, verify that all expected instructions are inside the basic
2948 blocks, and that all returns are followed by barriers. */
2949
2950 static int
2951 rtl_verify_bb_layout (void)
2952 {
2953 basic_block bb;
2954 int err = 0;
2955 rtx_insn *x, *y;
2956 int num_bb_notes;
2957 rtx_insn * const rtx_first = get_insns ();
2958 basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
2959
2960 num_bb_notes = 0;
2961 last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
2962
2963 for (x = rtx_first; x; x = NEXT_INSN (x))
2964 {
2965 if (NOTE_INSN_BASIC_BLOCK_P (x))
2966 {
2967 bb = NOTE_BASIC_BLOCK (x);
2968
2969 num_bb_notes++;
2970 if (bb != last_bb_seen->next_bb)
2971 internal_error ("basic blocks not laid down consecutively");
2972
2973 curr_bb = last_bb_seen = bb;
2974 }
2975
2976 if (!curr_bb)
2977 {
2978 switch (GET_CODE (x))
2979 {
2980 case BARRIER:
2981 case NOTE:
2982 break;
2983
2984 case CODE_LABEL:
2985 /* An ADDR_VEC is placed outside any basic block. */
2986 if (NEXT_INSN (x)
2987 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2988 x = NEXT_INSN (x);
2989
2990 /* But in any case, non-deletable labels can appear anywhere. */
2991 break;
2992
2993 default:
2994 fatal_insn ("insn outside basic block", x);
2995 }
2996 }
2997
2998 if (JUMP_P (x)
2999 && returnjump_p (x) && ! condjump_p (x)
3000 && ! ((y = next_nonnote_nondebug_insn (x))
3001 && BARRIER_P (y)))
3002 fatal_insn ("return not followed by barrier", x);
3003
3004 if (curr_bb && x == BB_END (curr_bb))
3005 curr_bb = NULL;
3006 }
3007
3008 if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
3009 internal_error
3010 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
3011 num_bb_notes, n_basic_blocks_for_fn (cfun));
3012
3013 return err;
3014 }
3015
3016 /* Verify the CFG and RTL consistency common for both underlying RTL and
3017 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3018
3019    Currently it does the following checks:
3020 - all checks of rtl_verify_flow_info_1
3021 - test head/end pointers
3022 - check that blocks are laid out in consecutive order
3023 - check that all insns are in the basic blocks
3024 (except the switch handling code, barriers and notes)
3025 - check that all returns are followed by barriers
3026    - check that each fallthru edge points to the adjacent block
3027 - verify that there is a single hot/cold partition boundary after bbro */
3028
3029 static int
3030 rtl_verify_flow_info (void)
3031 {
3032 int err = 0;
3033
3034 err |= rtl_verify_flow_info_1 ();
3035
3036 err |= rtl_verify_bb_insn_chain ();
3037
3038 err |= rtl_verify_fallthru ();
3039
3040 err |= rtl_verify_bb_layout ();
3041
3042 err |= verify_hot_cold_block_grouping ();
3043
3044 return err;
3045 }
3046
3047 /* Assume that the preceding pass has possibly eliminated jump instructions
3048    or converted the unconditional jumps.  Eliminate the dead edges from the
3049    CFG.  Return true if any edges were eliminated.  */
3050
3051 bool
3052 purge_dead_edges (basic_block bb)
3053 {
3054 edge e;
3055 rtx_insn *insn = BB_END (bb);
3056 rtx note;
3057 bool purged = false;
3058 bool found;
3059 edge_iterator ei;
3060
3061 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
3062 do
3063 insn = PREV_INSN (insn);
3064 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3065
3066 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
3067 if (NONJUMP_INSN_P (insn)
3068 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3069 {
3070 rtx eqnote;
3071
3072 if (! may_trap_p (PATTERN (insn))
3073 || ((eqnote = find_reg_equal_equiv_note (insn))
3074 && ! may_trap_p (XEXP (eqnote, 0))))
3075 remove_note (insn, note);
3076 }
3077
3078 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
3079 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3080 {
3081 bool remove = false;
3082
3083 /* There are three types of edges we need to handle correctly here: EH
3084 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3085 latter can appear when nonlocal gotos are used. */
3086 if (e->flags & EDGE_ABNORMAL_CALL)
3087 {
3088 if (!CALL_P (insn))
3089 remove = true;
3090 else if (can_nonlocal_goto (insn))
3091 ;
3092 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3093 ;
3094 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3095 ;
3096 else
3097 remove = true;
3098 }
3099 else if (e->flags & EDGE_EH)
3100 remove = !can_throw_internal (insn);
3101
3102 if (remove)
3103 {
3104 remove_edge (e);
3105 df_set_bb_dirty (bb);
3106 purged = true;
3107 }
3108 else
3109 ei_next (&ei);
3110 }
3111
3112 if (JUMP_P (insn))
3113 {
3114 rtx note;
3115 edge b,f;
3116 edge_iterator ei;
3117
3118       /* We care only about conditional jumps, return jumps and simplejumps.  */
3119 if (!any_condjump_p (insn)
3120 && !returnjump_p (insn)
3121 && !simplejump_p (insn))
3122 return purged;
3123
3124 /* Branch probability/prediction notes are defined only for
3125 condjumps. We've possibly turned condjump into simplejump. */
3126 if (simplejump_p (insn))
3127 {
3128 note = find_reg_note (insn, REG_BR_PROB, NULL);
3129 if (note)
3130 remove_note (insn, note);
3131 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3132 remove_note (insn, note);
3133 }
3134
3135 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3136 {
3137 	  /* Avoid letting abnormal flags leak from computed jumps turned
3138 	     into simplejumps.  */
3139
3140 e->flags &= ~EDGE_ABNORMAL;
3141
3142 /* See if this edge is one we should keep. */
3143 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3144 /* A conditional jump can fall through into the next
3145 block, so we should keep the edge. */
3146 {
3147 ei_next (&ei);
3148 continue;
3149 }
3150 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3151 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3152 /* If the destination block is the target of the jump,
3153 keep the edge. */
3154 {
3155 ei_next (&ei);
3156 continue;
3157 }
3158 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3159 && returnjump_p (insn))
3160 /* If the destination block is the exit block, and this
3161 instruction is a return, then keep the edge. */
3162 {
3163 ei_next (&ei);
3164 continue;
3165 }
3166 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3167 /* Keep the edges that correspond to exceptions thrown by
3168 this instruction and rematerialize the EDGE_ABNORMAL
3169 flag we just cleared above. */
3170 {
3171 e->flags |= EDGE_ABNORMAL;
3172 ei_next (&ei);
3173 continue;
3174 }
3175
3176 /* We do not need this edge. */
3177 df_set_bb_dirty (bb);
3178 purged = true;
3179 remove_edge (e);
3180 }
3181
3182 if (EDGE_COUNT (bb->succs) == 0 || !purged)
3183 return purged;
3184
3185 if (dump_file)
3186 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3187
3188 if (!optimize)
3189 return purged;
3190
3191 /* Redistribute probabilities. */
3192 if (single_succ_p (bb))
3193 {
3194 single_succ_edge (bb)->probability = profile_probability::always ();
3195 }
3196 else
3197 {
3198 note = find_reg_note (insn, REG_BR_PROB, NULL);
3199 if (!note)
3200 return purged;
3201
3202 b = BRANCH_EDGE (bb);
3203 f = FALLTHRU_EDGE (bb);
3204 b->probability = profile_probability::from_reg_br_prob_note
3205 (XINT (note, 0));
3206 f->probability = b->probability.invert ();
3207 }
3208
3209 return purged;
3210 }
3211 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3212 {
3213 /* First, there should not be any EH or ABCALL edges resulting
3214 from non-local gotos and the like. If there were, we shouldn't
3215 have created the sibcall in the first place. Second, there
3216 should of course never have been a fallthru edge. */
3217 gcc_assert (single_succ_p (bb));
3218 gcc_assert (single_succ_edge (bb)->flags
3219 == (EDGE_SIBCALL | EDGE_ABNORMAL));
3220
3221       return false;
3222 }
3223
3224 /* If we don't see a jump insn, we don't know exactly why the block would
3225 have been broken at this point. Look for a simple, non-fallthru edge,
3226 as these are only created by conditional branches. If we find such an
3227 edge we know that there used to be a jump here and can then safely
3228 remove all non-fallthru edges. */
3229 found = false;
3230 FOR_EACH_EDGE (e, ei, bb->succs)
3231 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3232 {
3233 found = true;
3234 break;
3235 }
3236
3237 if (!found)
3238 return purged;
3239
3240 /* Remove all but the fake and fallthru edges. The fake edge may be
3241 the only successor for this block in the case of noreturn
3242 calls. */
3243 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3244 {
3245 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3246 {
3247 df_set_bb_dirty (bb);
3248 remove_edge (e);
3249 purged = true;
3250 }
3251 else
3252 ei_next (&ei);
3253 }
3254
3255 gcc_assert (single_succ_p (bb));
3256
3257 single_succ_edge (bb)->probability = profile_probability::always ();
3258
3259 if (dump_file)
3260 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3261 bb->index);
3262 return purged;
3263 }
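
/* Usage sketch (hypothetical): after a pass simplifies or deletes the
   jump at the end of BB, drop the edges the old jump justified, and
   refresh any cached successor information if something was removed:

	bool changed = purge_dead_edges (bb);

   For a whole-function sweep, use purge_all_dead_edges below.  */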
3264
3265 /* Search all basic blocks for potentially dead edges and purge them. Return
3266 true if some edge has been eliminated. */
3267
3268 bool
3269 purge_all_dead_edges (void)
3270 {
3271   bool purged = false;
3272 basic_block bb;
3273
3274 FOR_EACH_BB_FN (bb, cfun)
3275 {
3276 bool purged_here = purge_dead_edges (bb);
3277
3278 purged |= purged_here;
3279 }
3280
3281 return purged;
3282 }
3283
3284 /* This is used by a few passes that emit some instructions after abnormal
3285 calls, moving the basic block's end, while they in fact do want to emit
3286    them on the fallthru edge.  Look for abnormal call edges, search backward
3287    for the call in the block, and insert the instructions on the edge instead.
3288
3289 Similarly, handle instructions throwing exceptions internally.
3290
3291 Return true when instructions have been found and inserted on edges. */
3292
3293 bool
3294 fixup_abnormal_edges (void)
3295 {
3296 bool inserted = false;
3297 basic_block bb;
3298
3299 FOR_EACH_BB_FN (bb, cfun)
3300 {
3301 edge e;
3302 edge_iterator ei;
3303
3304 /* Look for cases we are interested in - calls or instructions causing
3305 exceptions. */
3306 FOR_EACH_EDGE (e, ei, bb->succs)
3307 if ((e->flags & EDGE_ABNORMAL_CALL)
3308 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3309 == (EDGE_ABNORMAL | EDGE_EH)))
3310 break;
3311
3312 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3313 {
3314 rtx_insn *insn;
3315
3316 	  /* Get past the new insns generated.  Allow notes, as the insns
3317 	     may already have been deleted.  */
3318 insn = BB_END (bb);
3319 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3320 && !can_throw_internal (insn)
3321 && insn != BB_HEAD (bb))
3322 insn = PREV_INSN (insn);
3323
3324 if (CALL_P (insn) || can_throw_internal (insn))
3325 {
3326 rtx_insn *stop, *next;
3327
3328 e = find_fallthru_edge (bb->succs);
3329
3330 stop = NEXT_INSN (BB_END (bb));
3331 BB_END (bb) = insn;
3332
3333 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3334 {
3335 next = NEXT_INSN (insn);
3336 if (INSN_P (insn))
3337 {
3338 delete_insn (insn);
3339
3340 /* Sometimes there's still the return value USE.
3341 If it's placed after a trapping call (i.e. that
3342 call is the last insn anyway), we have no fallthru
3343 edge. Simply delete this use and don't try to insert
3344 on the non-existent edge.
3345 Similarly, sometimes a call that can throw is
3346 followed in the source with __builtin_unreachable (),
3347 meaning that there is UB if the call returns rather
3348 than throws. If there weren't any instructions
3349 following such calls before, supposedly even the ones
3350 we've deleted aren't significant and can be
3351 removed. */
3352 if (e)
3353 {
3354 /* We're not deleting it, we're moving it. */
3355 insn->set_undeleted ();
3356 SET_PREV_INSN (insn) = NULL_RTX;
3357 SET_NEXT_INSN (insn) = NULL_RTX;
3358
3359 insert_insn_on_edge (insn, e);
3360 inserted = true;
3361 }
3362 }
3363 else if (!BARRIER_P (insn))
3364 set_block_for_insn (insn, NULL);
3365 }
3366 }
3367
3368 /* It may be that we don't find any trapping insn. In this
3369 case we discovered quite late that the insn that had been
3370 marked as can_throw_internal in fact couldn't trap at all.
3371 So we should in fact delete the EH edges out of the block. */
3372 else
3373 purge_dead_edges (bb);
3374 }
3375 }
3376
3377 return inserted;
3378 }
3379
3380 /* Cut the insns from FIRST to LAST out of the insns stream. */
3381
3382 rtx_insn *
3383 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3384 {
3385 rtx_insn *prevfirst = PREV_INSN (first);
3386 rtx_insn *nextlast = NEXT_INSN (last);
3387
3388 SET_PREV_INSN (first) = NULL;
3389 SET_NEXT_INSN (last) = NULL;
3390 if (prevfirst)
3391 SET_NEXT_INSN (prevfirst) = nextlast;
3392 if (nextlast)
3393 SET_PREV_INSN (nextlast) = prevfirst;
3394 else
3395 set_last_insn (prevfirst);
3396 if (!prevfirst)
3397 set_first_insn (nextlast);
3398 return first;
3399 }
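
/* Illustration (assumed insn stream): given A <-> F <-> ... <-> G <-> B,
   unlink_insn_chain (F, G) rechains the stream as A <-> B and returns F.
   The cut-out insns keep their internal links, while PREV_INSN (F) and
   NEXT_INSN (G) become NULL; the function's first/last insn pointers are
   updated when the cut touches either end of the stream.  */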
3400
3401 /* Skip over inter-block insns occurring after BB which are typically
3402 associated with BB (e.g., barriers). If there are any such insns,
3403 we return the last one. Otherwise, we return the end of BB. */
3404
3405 static rtx_insn *
3406 skip_insns_after_block (basic_block bb)
3407 {
3408 rtx_insn *insn, *last_insn, *next_head, *prev;
3409
3410 next_head = NULL;
3411 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3412 next_head = BB_HEAD (bb->next_bb);
3413
3414 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3415 {
3416 if (insn == next_head)
3417 break;
3418
3419 switch (GET_CODE (insn))
3420 {
3421 case BARRIER:
3422 last_insn = insn;
3423 continue;
3424
3425 case NOTE:
3426 switch (NOTE_KIND (insn))
3427 {
3428 case NOTE_INSN_BLOCK_END:
3429 gcc_unreachable ();
3430 continue;
3431 default:
3432 continue;
3434 }
3435 break;
3436
3437 case CODE_LABEL:
3438 if (NEXT_INSN (insn)
3439 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3440 {
3441 insn = NEXT_INSN (insn);
3442 last_insn = insn;
3443 continue;
3444 }
3445 break;
3446
3447 default:
3448 break;
3449 }
3450
3451 break;
3452 }
3453
3454   /* It is possible to hit a contradictory sequence.  For instance:
3455
3456 jump_insn
3457 NOTE_INSN_BLOCK_BEG
3458 barrier
3459
3460      where the barrier belongs to the jump_insn, but the note does not.
3461      This can be created by removing the basic block originally following
3462      NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */
3463
3464 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3465 {
3466 prev = PREV_INSN (insn);
3467 if (NOTE_P (insn))
3468 switch (NOTE_KIND (insn))
3469 {
3470 case NOTE_INSN_BLOCK_END:
3471 gcc_unreachable ();
3472 break;
3473 case NOTE_INSN_DELETED:
3474 case NOTE_INSN_DELETED_LABEL:
3475 case NOTE_INSN_DELETED_DEBUG_LABEL:
3476 continue;
3477 default:
3478 reorder_insns (insn, insn, last_insn);
3479 }
3480 }
3481
3482 return last_insn;
3483 }
3484
3485 /* Locate or create a label for a given basic block. */
3486
3487 static rtx_insn *
3488 label_for_bb (basic_block bb)
3489 {
3490 rtx_insn *label = BB_HEAD (bb);
3491
3492 if (!LABEL_P (label))
3493 {
3494 if (dump_file)
3495 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3496
3497 label = block_label (bb);
3498 }
3499
3500 return label;
3501 }
3502
3503 /* Locate the effective beginning and end of the insn chain for each
3504 block, as defined by skip_insns_after_block above. */
3505
3506 static void
3507 record_effective_endpoints (void)
3508 {
3509 rtx_insn *next_insn;
3510 basic_block bb;
3511 rtx_insn *insn;
3512
3513 for (insn = get_insns ();
3514 insn
3515 && NOTE_P (insn)
3516 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3517 insn = NEXT_INSN (insn))
3518 continue;
3519 /* No basic blocks at all? */
3520 gcc_assert (insn);
3521
3522 if (PREV_INSN (insn))
3523 cfg_layout_function_header =
3524 unlink_insn_chain (get_insns (), PREV_INSN (insn));
3525 else
3526 cfg_layout_function_header = NULL;
3527
3528 next_insn = get_insns ();
3529 FOR_EACH_BB_FN (bb, cfun)
3530 {
3531 rtx_insn *end;
3532
3533 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3534 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3535 PREV_INSN (BB_HEAD (bb)));
3536 end = skip_insns_after_block (bb);
3537 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3538 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3539 next_insn = NEXT_INSN (BB_END (bb));
3540 }
3541
3542 cfg_layout_function_footer = next_insn;
3543 if (cfg_layout_function_footer)
3544 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3545 }
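
/* Sketch of the effect (assumed insn stream): for

	note1  [bb 2: label, bb note, insns, jump]  barrier  [bb 3: ...]

   the leading note1 is unlinked into cfg_layout_function_header, the
   barrier after bb 2's jump is unlinked into BB_FOOTER (bb 2), and any
   insns trailing the last block end up in cfg_layout_function_footer.  */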
3546
3547 namespace {
3548
3549 const pass_data pass_data_into_cfg_layout_mode =
3550 {
3551 RTL_PASS, /* type */
3552 "into_cfglayout", /* name */
3553 OPTGROUP_NONE, /* optinfo_flags */
3554 TV_CFG, /* tv_id */
3555 0, /* properties_required */
3556 PROP_cfglayout, /* properties_provided */
3557 0, /* properties_destroyed */
3558 0, /* todo_flags_start */
3559 0, /* todo_flags_finish */
3560 };
3561
3562 class pass_into_cfg_layout_mode : public rtl_opt_pass
3563 {
3564 public:
3565   pass_into_cfg_layout_mode (gcc::context *ctxt)
3566 : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3567 {}
3568
3569 /* opt_pass methods: */
3570   virtual unsigned int execute (function *)
3571 {
3572 cfg_layout_initialize (0);
3573 return 0;
3574 }
3575
3576 }; // class pass_into_cfg_layout_mode
3577
3578 } // anon namespace
3579
3580 rtl_opt_pass *
3581 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3582 {
3583 return new pass_into_cfg_layout_mode (ctxt);
3584 }
3585
3586 namespace {
3587
3588 const pass_data pass_data_outof_cfg_layout_mode =
3589 {
3590 RTL_PASS, /* type */
3591 "outof_cfglayout", /* name */
3592 OPTGROUP_NONE, /* optinfo_flags */
3593 TV_CFG, /* tv_id */
3594 0, /* properties_required */
3595 0, /* properties_provided */
3596 PROP_cfglayout, /* properties_destroyed */
3597 0, /* todo_flags_start */
3598 0, /* todo_flags_finish */
3599 };
3600
3601 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3602 {
3603 public:
3604   pass_outof_cfg_layout_mode (gcc::context *ctxt)
3605 : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3606 {}
3607
3608 /* opt_pass methods: */
3609 virtual unsigned int execute (function *);
3610
3611 }; // class pass_outof_cfg_layout_mode
3612
3613 unsigned int
3614 pass_outof_cfg_layout_mode::execute (function *fun)
3615 {
3616 basic_block bb;
3617
3618 FOR_EACH_BB_FN (bb, fun)
3619 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3620 bb->aux = bb->next_bb;
3621
3622 cfg_layout_finalize ();
3623
3624 return 0;
3625 }
3626
3627 } // anon namespace
3628
3629 rtl_opt_pass *
3630 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3631 {
3632 return new pass_outof_cfg_layout_mode (ctxt);
3633 }
3634
3635
3636 /* Link the basic blocks in the correct order, compacting the basic
3637 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3638 function also clears the basic block header and footer fields.
3639
3640 This function is usually called after a pass (e.g. tracer) finishes
3641 some transformations while in cfglayout mode. The required sequence
3642 of the basic blocks is in a linked list along the bb->aux field.
3643    This function re-links the basic block prev_bb and next_bb pointers
3644 accordingly, and it compacts and renumbers the blocks.
3645
3646 FIXME: This currently works only for RTL, but the only RTL-specific
3647 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3648 to GIMPLE a long time ago, but it doesn't relink the basic block
3649 chain. It could do that (to give better initial RTL) if this function
3650 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3651
3652 void
3653 relink_block_chain (bool stay_in_cfglayout_mode)
3654 {
3655 basic_block bb, prev_bb;
3656 int index;
3657
3658 /* Maybe dump the re-ordered sequence. */
3659 if (dump_file)
3660 {
3661 fprintf (dump_file, "Reordered sequence:\n");
3662 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3663 NUM_FIXED_BLOCKS;
3664 bb;
3665 bb = (basic_block) bb->aux, index++)
3666 {
3667 fprintf (dump_file, " %i ", index);
3668 if (get_bb_original (bb))
3669 fprintf (dump_file, "duplicate of %i ",
3670 get_bb_original (bb)->index);
3671 else if (forwarder_block_p (bb)
3672 && !LABEL_P (BB_HEAD (bb)))
3673 fprintf (dump_file, "compensation ");
3674 else
3675 fprintf (dump_file, "bb %i ", bb->index);
3676 }
3677 }
3678
3679 /* Now reorder the blocks. */
3680 prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3681 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3682 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3683 {
3684 bb->prev_bb = prev_bb;
3685 prev_bb->next_bb = bb;
3686 }
3687 prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3688 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3689
3690 /* Then, clean up the aux fields. */
3691 FOR_ALL_BB_FN (bb, cfun)
3692 {
3693 bb->aux = NULL;
3694 if (!stay_in_cfglayout_mode)
3695 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3696 }
3697
3698 /* Maybe reset the original copy tables, they are not valid anymore
3699      when we renumber the basic blocks in compact_blocks.  If we are
3700      going out of cfglayout mode, don't re-allocate the tables.  */
3701 if (original_copy_tables_initialized_p ())
3702 free_original_copy_tables ();
3703 if (stay_in_cfglayout_mode)
3704 initialize_original_copy_tables ();
3705
3706 /* Finally, put basic_block_info in the new order. */
3707 compact_blocks ();
3708 }
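
/* Typical caller pattern (a simplified sketch; see
   pass_outof_cfg_layout_mode::execute above for the real thing): thread
   the desired block order through the bb->aux fields, then have the
   chain relinked:

	FOR_EACH_BB_FN (bb, cfun)
	  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    bb->aux = bb->next_bb;
	relink_block_chain (false);

   fixup_reorder_chain below rebuilds the insn stream to match the same
   bb->aux order.  */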
3709
3710
3711 /* Given a reorder chain, rearrange the code to match. */
3712
3713 static void
3714 fixup_reorder_chain (void)
3715 {
3716 basic_block bb;
3717 rtx_insn *insn = NULL;
3718
3719 if (cfg_layout_function_header)
3720 {
3721 set_first_insn (cfg_layout_function_header);
3722 insn = cfg_layout_function_header;
3723 while (NEXT_INSN (insn))
3724 insn = NEXT_INSN (insn);
3725 }
3726
3727 /* First do the bulk reordering -- rechain the blocks without regard to
3728 the needed changes to jumps and labels. */
3729
3730 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3731 bb->aux)
3732 {
3733 if (BB_HEADER (bb))
3734 {
3735 if (insn)
3736 SET_NEXT_INSN (insn) = BB_HEADER (bb);
3737 else
3738 set_first_insn (BB_HEADER (bb));
3739 SET_PREV_INSN (BB_HEADER (bb)) = insn;
3740 insn = BB_HEADER (bb);
3741 while (NEXT_INSN (insn))
3742 insn = NEXT_INSN (insn);
3743 }
3744 if (insn)
3745 SET_NEXT_INSN (insn) = BB_HEAD (bb);
3746 else
3747 set_first_insn (BB_HEAD (bb));
3748 SET_PREV_INSN (BB_HEAD (bb)) = insn;
3749 insn = BB_END (bb);
3750 if (BB_FOOTER (bb))
3751 {
3752 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3753 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3754 while (NEXT_INSN (insn))
3755 insn = NEXT_INSN (insn);
3756 }
3757 }
3758
3759 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3760 if (cfg_layout_function_footer)
3761 SET_PREV_INSN (cfg_layout_function_footer) = insn;
3762
3763 while (NEXT_INSN (insn))
3764 insn = NEXT_INSN (insn);
3765
3766 set_last_insn (insn);
3767 if (flag_checking)
3768 verify_insn_chain ();

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb;
       bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx_insn *bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_jump);
	  if (any_condjump_p (bb_end_jump))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_jump)
			      || returnjump_p (bb_end_jump)
			      || (e_taken->flags & EDGE_CROSSING));
		  emit_barrier_after (bb_end_jump);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* The degenerate case of a conditional jump to the next
		 instruction can happen for jumps with side effects.
		 We need to construct a forwarder block, and this will
		 be done just fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);

		  if (note
		      && profile_probability::from_reg_br_prob_note
			   (XINT (note, 0)) < profile_probability::even ()
		      && invert_jump (bb_end_jump,
				      (e_fall->dest
				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_jump,
				    (e_fall->dest
				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (as_a<rtx_insn *> (ret_label));
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next, or if the asm goto
		 does not have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to the exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* When not optimizing, make sure that each edge's goto_locus is
     represented in the RTL by some instruction with that locus.  */
  if (!optimize && !DECL_IGNORED_P (current_function_decl))
    FOR_EACH_BB_FN (bb, cfun)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx_insn *end;

	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well; this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse directions.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

DEBUG_FUNCTION void
verify_insn_chain (void)
{
  rtx_insn *x, *prevx, *nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}
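
/* Illustrative sketch (hypothetical): a pass that splices an insn out
   of the chain by hand can use verify_insn_chain to confirm the
   next/prev invariants afterwards.  This assumes INSN has neighbors on
   both sides and ignores basic block boundaries; it only exercises the
   chain checks above.  */
#if 0
static void
example_unlink_and_check (rtx_insn *insn)
{
  SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
  SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
  if (flag_checking)
    verify_insn_chain ();
}
#endif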

/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block_after_labels (bb)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      while (c->aux)
	c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}

/* If the exit block has more than one fallthru predecessor, force it to
   have just one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}

/* Return true if it is possible to duplicate basic block BB.  */

static bool
cfg_layout_can_duplicate_bb_p (const_basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing the jump destination may be hoisted outside the basic
     block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx_insn *insn = BB_HEAD (bb);
      while (1)
	{
	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
	    return false;
	  if (insn == BB_END (bb))
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  return true;
}

rtx_insn *
duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn, *next, *copy;
  rtx_note *last;

  /* Avoid updating the boundaries of the previous basic block.  The
     note will be removed from the insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create the copy at the end of the insn chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (DEBUG_BIND_INSN_P (insn)
	      && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case JUMP_TABLE_DATA:
	  /* Avoid copying dispatch tables.  We never duplicate
	     tablejumps, so this can only happen if the table was
	     moved far from the original jump.
	     Also avoid copying the following barrier, if any
	     (and any debug insns in between).  */
	  for (next = NEXT_INSN (insn);
	       next != NEXT_INSN (to);
	       next = NEXT_INSN (next))
	    if (!DEBUG_INSN_P (next))
	      break;
	  if (next != NEXT_INSN (to) && BARRIER_P (next))
	    insn = next;
	  break;

	case CODE_LABEL:
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* If the prologue is empty and the function contains a
		 label in its first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just a single entry to the function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      /* We should only switch text sections once.  */
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
	      emit_note_copy (as_a <rtx_note *> (insn));
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
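
/* Illustrative sketch (hypothetical): mirror of how
   cfg_layout_duplicate_bb below uses duplicate_insn_chain -- copy the
   insns of BB to the end of the chain and return the first insn of the
   copy; the copied insns run up to get_last_insn ().  */
#if 0
static rtx_insn *
example_copy_bb_insns (basic_block bb)
{
  return duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
}
#endif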

/* Create a duplicate of the basic block BB.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *)
{
  rtx_insn *insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}


/* Main entry point to this module - initialize the data structures for
   CFG layout changes.

   FLAGS is a set of additional flags to pass to cleanup_cfg ().  */

void
cfg_layout_initialize (int flags)
{
  rtx_insn_list *x;
  basic_block bb;

  /* Once bb partitioning is complete, cfg layout mode should not be
     re-entered.  Entering cfg layout mode may require fixups: for
     example, edge forwarding performed while optimizing the cfg layout
     might require moving a block from the hot to the cold section,
     which would create an illegal partitioning unless some manual
     fixup was performed.  */
  gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);

  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non-local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = x->next ())
    {
      bb = BLOCK_FOR_INSN (x->insn ());
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}

/* Splits superblocks.  */
void
break_superblocks (void)
{
  bool need = false;
  basic_block bb;

  auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
  bitmap_clear (superblocks);

  FOR_EACH_BB_FN (bb, cfun)
    if (bb->flags & BB_SUPERBLOCK)
      {
	bb->flags &= ~BB_SUPERBLOCK;
	bitmap_set_bit (superblocks, bb->index);
	need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }
}

/* Finalize the changes: reorder the insn list according to the sequence
   specified by the aux pointers, insert compensation code, and rebuild
   the scope forest.  */

void
cfg_layout_finalize (void)
{
  free_dominance_info (CDI_DOMINATORS);
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  if (reload_completed && !targetm.have_epilogue ())
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

  if (flag_checking)
    verify_insn_chain ();
  checking_verify_flow_info ();
}
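
/* Illustrative sketch (hypothetical): the usual shape of a pass that
   works in cfglayout mode brackets its transformations between
   cfg_layout_initialize and cfg_layout_finalize, which register the
   cfg_layout_rtl_cfg_hooks and plain rtl_cfg_hooks respectively.  */
#if 0
static void
example_layout_pass (void)
{
  cfg_layout_initialize (0);

  /* ... reorder blocks via bb->aux, redirect edges, duplicate or
     merge blocks using the cfg_layout_* hooks ...  */

  cfg_layout_finalize ();
}
#endif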


/* Same as split_block but update cfg_layout structures.  */

static basic_block
cfg_layout_split_block (basic_block bb, void *insnp)
{
  rtx insn = (rtx) insnp;
  basic_block new_bb = rtl_split_block (bb, insn);

  BB_FOOTER (new_bb) = BB_FOOTER (bb);
  BB_FOOTER (bb) = NULL;

  return new_bb;
}

/* Redirect edge E to DEST.  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == dest)
    return e;

  if (e->flags & EDGE_CROSSING
      && BB_PARTITION (e->src) == BB_PARTITION (dest)
      && simplejump_p (BB_END (src)))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Removing crossing jump while redirecting edge from %i to %i\n",
		 e->src->index, dest->index);
      delete_insn (BB_END (src));
      remove_barriers_from_footer (src);
      e->flags |= EDGE_FALLTHRU;
    }

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* redirect_edge_and_branch may decide to turn the branch into a
     fallthru edge if the basic blocks appear to be in sequence.
     Avoid this transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  redirected->flags |= EDGE_FALLTHRU;
	  df_set_bb_dirty (redirected->src);
	  return redirected;
	}
      /* If we are redirecting the fallthru edge to the branch edge
	 of a conditional jump, remove the jump.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      if (dump_file)
	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
		 e->src->index, e->dest->index, dest->index);
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  if (!ret)
    return NULL;

  fixup_partition_crossing (ret);
  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}

/* Simple wrapper, as we can always redirect fallthru edges.  */
static basic_block
cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);

  gcc_assert (redirected);
  return NULL;
}

/* Same as delete_basic_block but update cfg_layout structures.  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remains;
  rtx_insn **to;

  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
	SET_NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      SET_PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = next;
      SET_PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      if (BB_FOOTER (bb))
	{
	  insn = BB_END (bb);
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  SET_NEXT_INSN (insn) = next;
	  if (next)
	    SET_PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      remains = unlink_insn_chain (prev, next);
      insn = remains;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = *to;
      if (*to)
	SET_PREV_INSN (*to) = insn;
      *to = remains;
    }
}

/* Return true when blocks A and B can be safely merged.  */

static bool
cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* If we would end up moving B's instructions, make sure it doesn't fall
     through into the exit block, since we cannot recover from a fallthrough
     edge into the exit block occurring in the middle of a function.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      edge e = find_fallthru_edge (b->succs);
      if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
    }

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b) == 1
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects, we can't kill the edge.
	     When not optimizing, try_redirect_by_replacing_jump will
	     not allow us to redirect an edge by replacing a table jump.  */
	  && (!JUMP_P (BB_END (a))
	      || ((!optimize || reload_completed)
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Merge blocks A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* A should have a fallthru edge; otherwise we can do a dummy
     redirection to get it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  rtx_insn *last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (b);
	  SET_PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
  if (BB_HEADER (b))
    {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  rtx_insn *last = BB_HEADER (b);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (a);
	  SET_PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* If the basic blocks are not adjacent, move B's insns after A's end.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to call it explicitly.  */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip a possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}

/* Split edge E.  */

static basic_block
cfg_layout_split_edge (edge e)
{
  basic_block new_bb =
    create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			? NEXT_INSN (BB_END (e->src)) : get_insns (),
			NULL_RTX, e->src);

  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    BB_COPY_PARTITION (new_bb, e->src);
  else
    BB_COPY_PARTITION (new_bb, e->dest);
  make_edge (new_bb, e->dest, EDGE_FALLTHRU);
  redirect_edge_and_branch_force (e, new_bb);

  return new_bb;
}

/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}

/* Return true if BB contains only labels or non-executable
   instructions.  */

static bool
rtl_block_empty_p (basic_block bb)
{
  rtx_insn *insn;

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;

  FOR_BB_INSNS (bb, insn)
    if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
      return false;

  return true;
}

/* Split a basic block if it ends with a conditional branch and if
   the other part of the block is not empty.  */

static basic_block
rtl_split_block_before_cond_jump (basic_block bb)
{
  rtx_insn *insn;
  rtx_insn *split_point = NULL;
  rtx_insn *last = NULL;
  bool found_code = false;

  FOR_BB_INSNS (bb, insn)
    {
      if (any_condjump_p (insn))
	split_point = last;
      else if (NONDEBUG_INSN_P (insn))
	found_code = true;
      last = insn;
    }

  /* Only split if we found both a conditional jump and other real
     code before it.  */
  if (found_code && split_point)
    return split_block (bb, split_point)->dest;
  else
    return NULL;
}

/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call, false otherwise.  */

static bool
rtl_block_ends_with_call_p (basic_block bb)
{
  rtx_insn *insn = BB_END (bb);

  while (!CALL_P (insn)
	 && insn != BB_HEAD (bb)
	 && (keep_with_call_p (insn)
	     || NOTE_P (insn)
	     || DEBUG_INSN_P (insn)))
    insn = PREV_INSN (insn);
  return (CALL_P (insn));
}

/* Return true if BB ends with a conditional branch, false otherwise.  */

static bool
rtl_block_ends_with_condjump_p (const_basic_block bb)
{
  return any_condjump_p (BB_END (bb));
}

/* Return true if we need to add a fake edge to the exit block.
   Helper function for rtl_flow_call_edges_add.  */

static bool
need_fake_edge_p (const rtx_insn *insn)
{
  if (!INSN_P (insn))
    return false;

  if ((CALL_P (insn)
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !(RTL_CONST_OR_PURE_CALL_P (insn))))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	   && MEM_VOLATILE_P (PATTERN (insn)))
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && asm_noperands (insn) != -1
	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls (and for volatile inline assembly) in the bitmap
   of blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
   Return the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non-constant
     calls since there is no way that we can determine if they will
     return or not.  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in the CFG already.  Calling make_edge in such a case
		 would cause us to mark that edge as fake and remove it
		 later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	      ne->probability = profile_probability::guessed_never ();
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
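
/* Illustrative sketch (hypothetical): split every block of the current
   function after its potentially non-returning calls, then drop the
   fake edges once the analysis that needed them is done;
   remove_fake_edges is the cfganal.c routine this pairs with.  */
#if 0
static void
example_with_fake_edges (void)
{
  /* A NULL bitmap means: consider the whole CFG.  */
  int n_split = rtl_flow_call_edges_add (NULL);

  /* ... run an analysis that must not assume calls return ...  */

  remove_fake_edges ();
  if (dump_file)
    fprintf (dump_file, "%d blocks were split\n", n_split);
}
#endif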

/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the block whose label is the target of the conditional branch;
   SECOND_HEAD is the fall-thru block.  There is no need to handle the
   fall-thru edge here; the loop versioning code handles that.  The
   reason SECOND_HEAD is a parameter at all is that it is needed for
   the condition on trees, and this hook should have the same type as
   its tree counterpart.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx_code_label *label;
  rtx_insn *seq, *jump;
  rtx op0 = XEXP ((rtx) comp_rtx, 0);
  rtx op1 = XEXP ((rtx) comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx) comp_rtx);
  machine_mode mode;

  label = block_label (first_head);
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
			   profile_probability::uninitialized ());
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Emit the new condition at the end of COND_BB.  */
  emit_insn_after (seq, BB_END (cond_bb));
}


/* Given a block B with a conditional branch at its end, store the
   branch edge in *BRANCH_EDGE and the fall-thru edge in
   *FALLTHRU_EDGE.  */
static void
rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
			   edge *fallthru_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_FALLTHRU)
    {
      *fallthru_edge = e;
      *branch_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *branch_edge = e;
      *fallthru_edge = EDGE_SUCC (b, 1);
    }
}
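
/* Illustrative sketch (hypothetical): classify the two successor edges
   of a block whose last insn rtl_block_ends_with_condjump_p accepted.  */
#if 0
static void
example_classify_cond_edges (basic_block bb)
{
  edge branch_edge, fallthru_edge;

  if (any_condjump_p (BB_END (bb)))
    {
      rtl_extract_cond_bb_edges (bb, &branch_edge, &fallthru_edge);
      if (dump_file)
	fprintf (dump_file, "branch to bb %d, fallthru to bb %d\n",
		 branch_edge->dest->index, fallthru_edge->dest->index);
    }
}
#endif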

void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
}

/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
rtl_can_remove_branch_p (const_edge e)
{
  const_basic_block src = e->src;
  const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
  const rtx_insn *insn = BB_END (src);
  rtx set;

  /* The conditions are taken from try_redirect_by_replacing_jump.  */
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return false;

  if (!onlyjump_p (insn)
      || tablejump_p (insn, NULL, NULL))
    return false;

  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  return true;
}

static basic_block
rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
{
  bb = cfg_layout_duplicate_bb (bb, id);
  bb->aux = NULL;
  return bb;
}

/* Do book-keeping of basic block BB for the profile consistency checker.
   Store the counts in RECORD.  */
static void
rtl_account_profile_record (basic_block bb, struct profile_record *record)
{
  rtx_insn *insn;
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	record->size += insn_cost (insn, false);
	if (bb->count.initialized_p ())
	  record->time
	    += insn_cost (insn, true) * bb->count.to_gcov_type ();
	else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
	  record->time
	    += insn_cost (insn, true) * bb->count.to_frequency (cfun);
      }
}

/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks, /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  rtl_duplicate_bb,
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};

/* Implementation of CFG manipulation for cfglayout RTL, where basic
   blocks connected via fallthru edges do not have to be adjacent.
   This representation will hopefully become the default one in a
   future version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
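
/* Illustrative sketch (hypothetical): which of the two hook tables
   above is active is controlled by the registration helpers from
   cfghooks.c, exactly as cfg_layout_initialize and cfg_layout_finalize
   do earlier in this file.  */
#if 0
static void
example_switch_hooks (void)
{
  cfg_layout_rtl_register_cfg_hooks ();  /* Use cfg_layout_rtl_cfg_hooks.  */
  /* ... transformations in cfglayout mode ...  */
  rtl_register_cfg_hooks ();  /* Back to plain rtl_cfg_hooks.  */
}
#endif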

#include "gt-cfgrtl.h"