/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not meant for generic use:
     - Infrastructure to quickly determine the basic block for an insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "bb-reorder.h"
#include "rtl-error.h"
#include "insn-attr.h"
#include "dojump.h"
#include "expr.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "print-rtl.h"
#include "rtl-iter.h"
#include "gimplify.h"

/* Disable warnings about missing quoting in GCC diagnostics.  */
#if __GNUC__ >= 10
#  pragma GCC diagnostic push
#  pragma GCC diagnostic ignored "-Wformat-diag"
#endif

/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx_insn *cfg_layout_function_footer;
static GTY(()) rtx_insn *cfg_layout_function_header;

static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static int can_delete_note_p (const rtx_note *);
static int can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);
static bool rtl_bb_info_initialized_p (basic_block bb);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const rtx_note *note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !vec_safe_contains<rtx_insn *> (forced_labels,
					     const_cast<rtx_code_label *> (label)));
}

/* Delete INSN by patching it out.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be referenced via variables, the constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Likewise if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
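
/* A minimal usage sketch (BB here is a hypothetical, valid basic block):
   dropping the jump that ends a block goes through delete_insn, which
   keeps LABEL_NUSES of the jump target consistent so the label itself
   can disappear later during block merging:

       rtx_insn *jump = BB_END (bb);
       if (JUMP_P (jump))
	 delete_insn (jump);

   Note that delete_insn does not touch CFG edges; when the deleted insn
   ends its block, delete_insn_and_edges below also purges the now-dead
   edges.  */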

/* Like delete_insn but also purge dead edges from BB.
   Return true if any edges are eliminated.  */

bool
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

  if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      if (BB_END (bb) == insn)
	purge = true;
      else if (DEBUG_INSN_P (BB_END (bb)))
	for (rtx_insn *dinsn = NEXT_INSN (insn);
	     DEBUG_INSN_P (dinsn); dinsn = NEXT_INSN (dinsn))
	  if (BB_END (bb) == dinsn)
	    {
	      purge = true;
	      break;
	    }
    }
  delete_insn (insn);
  if (purge)
    return purge_dead_edges (BLOCK_FOR_INSN (insn));
  return false;
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, set the bb field to
   NULL for insns that cannot be removed.  */

void
delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
{
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
  rtx_insn *current = finish;
  while (1)
    {
      rtx_insn *prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
	;
      else
	delete_insn (current);

      if (clear_bb && !current->deleted ())
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}
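
/* Usage sketch, assuming BB is a valid block that is about to go away:
   rtl_delete_block below effectively does

       delete_insn_chain (BB_HEAD (bb), get_last_bb_insn (bb), true);

   Passing true for CLEAR_BB detaches any surviving paired notes from
   the block, so the block structure itself can be freed safely.  */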

/* Create a new basic block consisting of the instructions between HEAD and
   END inclusive.  This function is designed to allow fast BB construction -
   it reuses the note and basic block struct in BB_NOTE, if any, does not
   grow the BASIC_BLOCK chain, and should be used directly only by CFG
   construction code.  END can be NULL to create a new empty basic block
   before HEAD.  Both END and HEAD can be NULL to create a basic block at
   the end of the INSN chain.  AFTER is the basic block we should be put
   after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
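
/* Callers outside CFG construction normally reach this function through
   the create_basic_block cfg hook rather than directly; a sketch of the
   common pattern, assuming HEAD and END delimit a valid insn sequence:

       basic_block new_bb = create_basic_block (head, end, after);

   In rtl mode the hook dispatches to rtl_create_basic_block below, which
   passes a NULL BB_NOTE so that a fresh NOTE_INSN_BASIC_BLOCK is
   emitted.  */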

/* Create a new basic block consisting of the instructions between HEAD and
   END and place it in the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   last_basic_block_for_fn (cfun) + 1);

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);

  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}

/* Return the insn after which to emit code at the entry of the function.  */
rtx_insn *
entry_of_function (void)
{
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
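
/* Sketch: to execute a (hypothetical) marker insn exactly once on entry,
   hand its pattern to emit_insn_at_entry and let the edge-insertion
   machinery place and commit it on the entry fallthru edge:

       emit_insn_at_entry (gen_rtx_USE (VOIDmode, some_reg));

   where some_reg is an illustrative placeholder register.  */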

/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}

/* Like active_insn_p, except keep the return value use or clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.
     Similarly, keep a USE of the function return value, otherwise
     the USE is dropped and we could fail to thread jump if USE
     appears on some paths and not on others, see PR90257.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || GET_CODE (PATTERN (insn)) == USE)
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
	return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

  return true;
}
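
/* Typical query, sketched after the cleanup loops elsewhere in GCC that
   scan every block looking for forwarders to redirect around:

       basic_block bb;
       FOR_EACH_BB_FN (bb, cfun)
	 if (forwarder_block_p (bb))
	   ...;

   where "..." would redirect the incoming edges past BB.  The loop
   checks above keep such passes from collapsing latches and headers
   that the loop optimizers rely on.  */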

/* Return nonzero if we can reach target from src by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}
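
/* Sketch: reordering code asks this before deciding whether an explicit
   jump is needed between two blocks it wants to lay out consecutively;
   for a fallthru edge E from SRC to TARGET, something like

       if (!can_fallthru (src, target))
	 force_nonfallthru (e);

   (a hypothetical call site; force_nonfallthru is the cfghooks.h
   wrapper implemented by rtl_force_nonfallthru later in this file).  */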

/* Return nonzero if we could reach target from src by falling through,
   if the target was made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}

/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Create a new basic block just after basic block BB by splitting
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check the type of the insn so that we do not create
     an inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
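
/* Sketch: generic code reaches this via the split_block cfg hook.
   Splitting BB after INSN yields the edge connecting the two halves,
   with the new tail block as its destination:

       edge e = split_block (bb, insn);
       basic_block tail = e->dest;

   (split_block is declared in cfghooks.h; in rtl mode it dispatches
   here.)  */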

/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && only_sets_cc0_p (prev))
	{
	  rtx_insn *tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
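
/* Sketch: passes use the cfg hooks rather than calling this directly;
   a typical guarded merge is

       if (can_merge_blocks_p (a, b))
	 merge_blocks (a, b);

   where can_merge_blocks_p dispatches to rtl_can_merge_blocks below,
   which enforces the single-edge, adjacency and partition
   requirements.  */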

/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */

rtx_code_label *
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  if (!LABEL_P (BB_HEAD (block)))
    {
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
    }

  return as_a <rtx_code_label *> (BB_HEAD (block));
}
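
/* Sketch: block_label is the standard way to obtain a jump target when
   synthesizing a branch to TARGET, as try_redirect_by_replacing_jump
   does later in this file:

       rtx_code_label *label = block_label (target);
       emit_jump_insn_after_noloc (targetm.gen_jump (label), insn);
       LABEL_NUSES (label)++;

   Remember to bump LABEL_NUSES; block_label only guarantees that the
   label exists.  */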

/* Remove all barriers from BB_FOOTER of a BB.  */

static void
remove_barriers_from_footer (basic_block bb)
{
  rtx_insn *insn = BB_FOOTER (bb);

  /* Remove barriers but keep jumptables.  */
  while (insn)
    {
      if (BARRIER_P (insn))
	{
	  if (PREV_INSN (insn))
	    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	  else
	    BB_FOOTER (bb) = NEXT_INSN (insn);
	  if (NEXT_INSN (insn))
	    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	}
      if (LABEL_P (insn))
	return;
      insn = NEXT_INSN (insn);
    }
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block.  The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  delete_insn_chain (kill_from, BB_END (src), false);
	  remove_barriers_from_footer (src);
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
					PREV_INSN (barrier), src);

	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this only when
   it originally reached OLD_LABEL.  Return true if this worked or the
   original target wasn't OLD_LABEL, return false if redirection doesn't
   work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = tablejump_casesi_pattern (insn)) != NULL_RTX
	  && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ??? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  Or for crossing
	     jumps.  */
	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			      block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
			  || CROSSING_JUMP_P (insn));
	      return false;
	    }
	}
    }
  return true;
}

/* Redirect the edge representing the branch of an (un)conditional jump or
   tablejump; return NULL on failure.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
					     old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}

/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
	 src if it exists, and if no other successors are
	 still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
	{
	  bool has_crossing_succ = false;
	  edge e2;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e2, ei, e->src->succs)
	    {
	      has_crossing_succ |= (e2->flags & EDGE_CROSSING);
	      if (has_crossing_succ)
		break;
	    }
	  if (!has_crossing_succ)
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
	}
    }
}

/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
	 Note that force_nonfallthru will do any necessary partition
	 boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
	  && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	force_nonfallthru (e);
      else
	fixup_partition_crossing (e);
    }
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do that at
   the expense of adding new instructions or reordering basic blocks.

   Function can also be called with an edge destination equivalent to the
   TARGET.  Then it should try the simplifications and do nothing if none
   is possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.
   We still return NULL if E already pointed to TARGET and we didn't
   manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}
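
/* Sketch: callers normally go through the redirect_edge_and_branch cfg
   hook and fall back to the _force variant when cheap redirection
   fails:

       edge e2 = redirect_edge_and_branch (e, target);
       if (!e2)
	 redirect_edge_and_branch_force (e, target);

   (both wrappers are declared in cfghooks.h).  */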

/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
	      || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
	  rtx_insn *footer_tail = BB_FOOTER (bb);

	  while (NEXT_INSN (footer_tail))
	    footer_tail = NEXT_INSN (footer_tail);
	  if (!BARRIER_P (footer_tail))
	    {
	      SET_NEXT_INSN (footer_tail) = insn;
	      SET_PREV_INSN (insn) = footer_tail;
	    }
	}
      else
	BB_FOOTER (bb) = insn;
    }
}
1496 
1497 /* Like force_nonfallthru below, but additionally performs redirection
1498    Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
1499    when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1500    simple_return_rtx, indicating which kind of returnjump to create.
1501    It should be NULL otherwise.  */
1502 
1503 basic_block
force_nonfallthru_and_redirect(edge e,basic_block target,rtx jump_label)1504 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1505 {
1506   basic_block jump_block, new_bb = NULL, src = e->src;
1507   rtx note;
1508   edge new_edge;
1509   int abnormal_edge_flags = 0;
1510   bool asm_goto_edge = false;
1511   int loc;
1512 
1513   /* In the case the last instruction is conditional jump to the next
1514      instruction, first redirect the jump itself and then continue
1515      by creating a basic block afterwards to redirect fallthru edge.  */
1516   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1517       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1518       && any_condjump_p (BB_END (e->src))
1519       && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1520     {
1521       rtx note;
1522       edge b = unchecked_make_edge (e->src, target, 0);
1523       bool redirected;
1524 
1525       redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
1526 				  block_label (target), 0);
1527       gcc_assert (redirected);
1528 
1529       note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1530       if (note)
1531 	{
1532 	  int prob = XINT (note, 0);
1533 
1534 	  b->probability = profile_probability::from_reg_br_prob_note (prob);
1535 	  e->probability -= e->probability;
1536 	}
1537     }
1538 
1539   if (e->flags & EDGE_ABNORMAL)
1540     {
1541       /* Irritating special case - fallthru edge to the same block as abnormal
1542 	 edge.
1543 	 We can't redirect abnormal edge, but we still can split the fallthru
1544 	 one and create separate abnormal edge to original destination.
1545 	 This allows bb-reorder to make such edge non-fallthru.  */
1546       gcc_assert (e->dest == target);
1547       abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1548       e->flags &= EDGE_FALLTHRU;
1549     }
1550   else
1551     {
1552       gcc_assert (e->flags & EDGE_FALLTHRU);
1553       if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1554 	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

	  /* Make sure new block ends up in correct hot/cold section.  */
	  BB_COPY_PARTITION (bb, e->dest);

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      profile_count count = e->count ();
      profile_probability probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = profile_probability::always ();

      /* If e->src was previously region crossing, it no longer is
         and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
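	  /* There is no way to tell how often the asm goto actually
	     branches to TARGET's label, so assume an even split between
	     the fallthru edge and the extra edge added below.  */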
	  new_edge->probability = new_edge->probability.apply_scale (1, 2);
	  jump_block->count = jump_block->count.apply_scale (1, 2);
	  edge new_edge2 = make_edge (new_edge->src, target,
				      e->flags & ~EDGE_FALLTHRU);
	  new_edge2->probability = probability - new_edge->probability;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (jump_label == ret_rtx)
	emit_jump_insn_after_setloc (targetm.gen_return (),
				     BB_END (jump_block), loc);
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
				       BB_END (jump_block), loc);
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx_code_label *label = block_label (target);
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
				   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}

/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

static basic_block
rtl_force_nonfallthru (edge e)
{
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}

/* Redirect edge even at the expense of creating new jump insn or
   basic block.  Return new basic block if created, NULL otherwise.
   Conversion must be possible.  */

static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target, NULL_RTX);
}

/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (NONDEBUG_INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx_insn *label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  reorder_insns (label, label, PREV_INSN (q));
	  delete_insn (table);
	}

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);

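      /* Step back so that Q becomes the last insn to keep; the jump
	 itself then falls within the range removed by the
	 delete_insn_chain call below.  */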
      q = PREV_INSN (q);
    }
  /* Unconditional jumps with side-effects (i.e. which we can't just delete
     together with the barrier) should never have a fallthru edge.  */
  else if (JUMP_P (q) && any_uncondjump_p (q))
    return;

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}

/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}

/* Locate the last bb in the same partition as START_BB.  */

static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
        return bb;
    }
  /* The loop fell through, so BB is now the exit block; return the
     basic block just before it.  */
  return bb->prev_bb;
}

/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
          BB_COPY_PARTITION (bb, edge_in->dest);
        }
      else
        {
          basic_block after = edge_in->dest->prev_bb;
          /* If this is post-bb reordering, and the edge crosses a partition
             boundary, the new block needs to be inserted in the bb chain
             at the end of the src partition (since we put the new bb into
             that partition, see below). Otherwise we may end up creating
             an extra partition crossing in the chain, which is illegal.
             It can't go after the src, because src may have a fall-through
             to a different block.  */
          if (crtl->bb_reorder_complete
              && (edge_in->flags & EDGE_CROSSING))
            {
              after = last_bb_in_partition (edge_in->src);
              before = get_last_bb_insn (after);
              /* The instruction following the last bb in partition should
                 be a barrier, since it cannot end in a fall-through.  */
              gcc_checking_assert (BARRIER_P (before));
              before = NEXT_INSN (before);
            }
          bb = create_basic_block (before, NULL, after);
          /* Put the split bb into the src partition, to avoid creating
             a situation where a cold bb dominates a hot bb, in the case
             where src is cold and dest is hot. The src will dominate
             the new bb (whereas it might not have dominated dest).  */
          BB_COPY_PARTITION (bb, edge_in->src);
        }
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
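      /* BB was just created with a single fallthru successor and no
	 jump of its own, so force_nonfallthru should only emit a jump
	 inside BB and never needs to create another new block here.  */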
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && (extract_asm_operands (PATTERN (last))
		  || JUMP_LABEL (last) == before)
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}

/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (rtx pattern, edge e)
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));

  if (e->insns.r == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns.r);

  emit_insn (pattern);

  e->insns.r = get_insns ();
  end_sequence ();
}

/* Update the CFG for the instructions queued on edge E.  */

void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
         a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions (void)
{
  basic_block bb;

  /* Optimization passes that invoke this routine can cause hot blocks
     previously reached by both hot and cold blocks to become dominated only
     by cold blocks. This will cause the verification below to fail,
     and lead to now cold code in the hot section. In some cases this
     may only be visible after newly unreachable blocks are deleted,
     which will be done by fixup_partitions.  */
  fixup_partitions ();

  if (!currently_expanding_to_rtl)
    checking_verify_flow_info ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  {
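	    /* During expansion the queued insns do not have their
	       JUMP_LABELs set up yet; compute them before committing
	       the insns to the CFG.  */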
	    if (currently_expanding_to_rtl)
	      rebuild_jump_labels_chain (e->insns.r);
	    commit_one_edge_insertion (e);
	  }
    }
}


/* Print out RTL-specific basic block information (live information
   at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
   documented in dumpfile.h.  */

static void
rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
{
  char *s_indent;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset (s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_top (bb, outf);
      putc ('\n', outf);
    }

  if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK
      && rtl_bb_info_initialized_p (bb))
    {
      rtx_insn *last = BB_END (bb);
      if (last)
	last = NEXT_INSN (last);
      for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
	{
	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (insn, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, insn);
	  else
	    dump_insn_slim (outf, insn);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (insn, outf);
	}
    }

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_bottom (bb, outf);
      putc ('\n', outf);
    }

}

/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.  */

void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      if (cfun->curr_properties & PROP_cfg)
	{
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      if (flags & TDF_BLOCKS)
		{
		  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		    {
		      enum bb_state state = IN_MULTIPLE_BB;

		      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
			state = IN_ONE_BB;
		      in_bb_p[INSN_UID (x)] = state;

		      if (x == BB_END (bb))
			break;
		    }
		}
	    }
	}

      for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  bb = end[INSN_UID (tmp_rtx)];
	  if (bb != NULL)
	    {
	      if (flags & TDF_BLOCKS)
		{
		  dump_bb_info (outf, bb, 0, dump_flags, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	      /* Emit a hint if the fallthrough target of the current
	         basic block isn't the one placed immediately after it.  */
	      else if (EDGE_COUNT (bb->succs) > 0)
		{
		  gcc_assert (BB_END (bb) == tmp_rtx);
		  const rtx_insn *ninsn = NEXT_INSN (tmp_rtx);
		  /* Bypass intervening deleted-insn notes and debug insns.  */
		  while (ninsn
			 && !NONDEBUG_INSN_P (ninsn)
			 && !start[INSN_UID (ninsn)])
		    ninsn = NEXT_INSN (ninsn);
		  edge e = find_fallthru_edge (bb->succs);
		  if (e && ninsn)
		    {
		      basic_block dest = e->dest;
		      if (start[INSN_UID (ninsn)] != dest)
			fprintf (outf, "%s      ; pc falls through to BB %d\n",
				 print_rtx_head, dest->index);
		    }
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}

/* Update the branch probability of BB if a REG_BR_PROB is present.  */

void
update_br_prob_note (basic_block bb)
{
  rtx note;
  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
  if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
    {
      if (note)
	{
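	  /* BB no longer ends in a jump, or the edge probability is
	     not initialized; unlink the stale REG_BR_PROB note from
	     the REG_NOTES chain by hand.  */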
	  rtx *note_link, this_rtx;

	  for (note_link = &REG_NOTES (BB_END (bb));
	       (this_rtx = *note_link) != NULL_RTX;
	       note_link = &XEXP (this_rtx, 1))
	    if (this_rtx == note)
	      {
		*note_link = XEXP (this_rtx, 1);
		break;
	      }
	}
      return;
    }
  if (!note
      || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
    return;
  XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
}

/* Get the last insn associated with block BB (that includes barriers and
   tablejumps after BB).  */
rtx_insn *
get_last_bb_insn (basic_block bb)
{
  rtx_jump_table_data *table;
  rtx_insn *tmp;
  rtx_insn *end = BB_END (bb);

  /* Include any jump table following the basic block.  */
  if (tablejump_p (end, NULL, &table))
    end = table;

  /* Include any barriers that may follow the basic block.  */
  tmp = next_nonnote_nondebug_insn_bb (end);
  while (tmp && BARRIER_P (tmp))
    {
      end = tmp;
      tmp = next_nonnote_nondebug_insn_bb (end);
    }

  return end;
}

/* Add all BBs reachable from entry via hot paths into the SET.  */

void
find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
{
  auto_vec<basic_block, 64> worklist;

  set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));

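  /* Standard worklist walk.  hash_set::add returns true when the
     element was already present, so each block is pushed at most
     once.  */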
  while (worklist.length () > 0)
    {
      basic_block bb = worklist.pop ();
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
	    && !set->add (e->dest))
	  worklist.safe_push (e->dest);
    }
}

/* Sanity check partition hotness to ensure that basic blocks in
   the cold partition don't dominate basic blocks in the hot partition.
   If FLAG_ONLY is true, report violations as errors. Otherwise
   re-mark the dominated blocks as cold, since this is run after
   cfg optimizations that may make hot blocks previously reached
   by both hot and cold blocks now only reachable along cold paths.  */

static auto_vec<basic_block>
find_partition_fixes (bool flag_only)
{
  basic_block bb;
  auto_vec<basic_block> bbs_to_fix;
  hash_set<basic_block> set;

  /* Callers check this.  */
  gcc_checking_assert (crtl->has_bb_partition);

  find_bbs_reachable_by_hot_paths (&set);

  FOR_EACH_BB_FN (bb, cfun)
    if (!set.contains (bb)
	&& BB_PARTITION (bb) != BB_COLD_PARTITION)
      {
	if (flag_only)
	  error ("non-cold basic block %d reachable only "
		 "by paths crossing the cold partition", bb->index);
	else
	  BB_SET_PARTITION (bb, BB_COLD_PARTITION);
	bbs_to_fix.safe_push (bb);
      }

  return bbs_to_fix;
}

/* Perform cleanup on the hot/cold bb partitioning after optimization
   passes that modify the cfg.  */

void
fixup_partitions (void)
{
  if (!crtl->has_bb_partition)
    return;

  /* Delete any blocks that became unreachable and weren't
     already cleaned up, for example during edge forwarding
     and convert_jumps_to_returns. This will expose more
     opportunities for fixing the partition boundaries here.
     Also, the calculation of the dominance graph during verification
     will assert if there are unreachable nodes.  */
  delete_unreachable_blocks ();

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.
     Fixup any that now violate this requirement, as a result of edge
     forwarding and unreachable block deletion.  */
  auto_vec<basic_block> bbs_to_fix = find_partition_fixes (false);

  /* Do the partition fixup after all necessary blocks have been converted to
     cold, so that we only update the region crossings the minimum number of
     places, which can require forcing edges to be non fallthru.  */
  if (! bbs_to_fix.is_empty ())
    {
      do
	{
	  basic_block bb = bbs_to_fix.pop ();
	  fixup_new_cold_bb (bb);
	}
      while (! bbs_to_fix.is_empty ());

      /* Fix up hot cold block grouping if needed.  */
      if (crtl->bb_reorder_complete && current_ir_type () == IR_RTL_CFGRTL)
	{
	  basic_block bb, first = NULL, second = NULL;
	  int current_partition = BB_UNPARTITIONED;

	  FOR_EACH_BB_FN (bb, cfun)
	    {
	      if (current_partition != BB_UNPARTITIONED
		  && BB_PARTITION (bb) != current_partition)
		{
		  if (first == NULL)
		    first = bb;
		  else if (second == NULL)
		    second = bb;
		  else
		    {
		      /* If we switch partitions for the 3rd, 5th etc. time,
			 move bbs first (inclusive) .. second (exclusive) right
			 before bb.  */
		      basic_block prev_first = first->prev_bb;
		      basic_block prev_second = second->prev_bb;
		      basic_block prev_bb = bb->prev_bb;
		      prev_first->next_bb = second;
		      second->prev_bb = prev_first;
		      prev_second->next_bb = bb;
		      bb->prev_bb = prev_second;
		      prev_bb->next_bb = first;
		      first->prev_bb = prev_bb;
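		      /* Splice the underlying insn chain the same way so
			 it stays consistent with the new bb order.  */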
		      rtx_insn *prev_first_insn = PREV_INSN (BB_HEAD (first));
		      rtx_insn *prev_second_insn
			= PREV_INSN (BB_HEAD (second));
		      rtx_insn *prev_bb_insn = PREV_INSN (BB_HEAD (bb));
		      SET_NEXT_INSN (prev_first_insn) = BB_HEAD (second);
		      SET_PREV_INSN (BB_HEAD (second)) = prev_first_insn;
		      SET_NEXT_INSN (prev_second_insn) = BB_HEAD (bb);
		      SET_PREV_INSN (BB_HEAD (bb)) = prev_second_insn;
		      SET_NEXT_INSN (prev_bb_insn) = BB_HEAD (first);
		      SET_PREV_INSN (BB_HEAD (first)) = prev_bb_insn;
		      second = NULL;
		    }
		}
	      current_partition = BB_PARTITION (bb);
	    }
	  gcc_assert (!second);
	}
    }
}

/* Verify, in the basic block chain, that there is at most one switch
   between hot/cold partitions. This condition will not be true until
   after reorder_basic_blocks is called.  */

static int
verify_hot_cold_block_grouping (void)
{
  basic_block bb;
  int err = 0;
  bool switched_sections = false;
  int current_partition = BB_UNPARTITIONED;

  /* Even after bb reordering is complete, we go into cfglayout mode
     again (in compgoto). Ensure we don't call this before going back
     into linearized RTL when any layout fixes would have been committed.  */
  if (!crtl->bb_reorder_complete
      || current_ir_type () != IR_RTL_CFGRTL)
    return err;

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (current_partition != BB_UNPARTITIONED
	  && BB_PARTITION (bb) != current_partition)
	{
	  if (switched_sections)
	    {
	      error ("multiple hot/cold transitions found (bb %i)",
		     bb->index);
	      err = 1;
	    }
	  else
	    switched_sections = true;

	  if (!crtl->has_bb_partition)
	    error ("partition found but function partition flag not set");
	}
      current_partition = BB_PARTITION (bb);
    }

  return err;
}


/* Perform several checks on the edges out of each block, such as
   the consistency of the branch probabilities, the correctness
   of hot/cold partition crossing edges, and the number of expected
   successor edges.  Also verify that the dominance relationship
   between hot/cold blocks is sane.  */

static int
rtl_verify_edges (void)
{
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
      int n_eh = 0, n_abnormal = 0;
      edge e, fallthru = NULL;
      edge_iterator ei;
      rtx note;
      bool has_crossing_edge = false;

      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (!BRANCH_EDGE (bb)->probability.initialized_p ())
	    {
	      if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
		{
		  error ("verify_flow_info: "
			 "REG_BR_PROB is set but cfg probability is not");
		  err = 1;
		}
	    }
	  else if (XINT (note, 0)
		   != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
		   && profile_status_for_fn (cfun) != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
		     XINT (note, 0),
		     BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
	      err = 1;
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  bool is_crossing;

	  if (e->flags & EDGE_FALLTHRU)
	    n_fallthru++, fallthru = e;

	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
	  has_crossing_edge |= is_crossing;
	  if (e->flags & EDGE_CROSSING)
	    {
	      if (!is_crossing)
		{
		  error ("EDGE_CROSSING incorrectly set across same section");
		  err = 1;
		}
	      if (e->flags & EDGE_FALLTHRU)
		{
		  error ("fallthru edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (e->flags & EDGE_EH)
		{
		  error ("EH edge crosses section boundary in bb %i",
			 e->src->index);
		  err = 1;
		}
	      if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
		{
		  error ("No region crossing jump at section boundary in bb %i",
			 bb->index);
		  err = 1;
		}
	    }
	  else if (is_crossing)
	    {
	      error ("EDGE_CROSSING missing across section boundary");
	      err = 1;
	    }

	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING
			    | EDGE_PRESERVE)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_abnormal_call++;

	  if (e->flags & EDGE_SIBCALL)
	    n_sibcall++;

	  if (e->flags & EDGE_EH)
	    n_eh++;

	  if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      if (!has_crossing_edge
	  && JUMP_P (BB_END (bb))
	  && CROSSING_JUMP_P (BB_END (bb)))
	{
	  print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
	  error ("Region crossing jump across same section in bb %i",
		 bb->index);
	  err = 1;
	}

      if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_eh > 1)
	{
	  error ("too many exception handling edges in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong number of branch edges after unconditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong number of branch edges after conditional jump"
		 " in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal_call && !CALL_P (BB_END (bb)))
	{
	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_sibcall && !CALL_P (BB_END (bb)))
	{
	  error ("sibcall edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal > n_eh
	  && !(CALL_P (BB_END (bb))
	       && n_abnormal == n_abnormal_call + n_sibcall)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}

      int has_eh = -1;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (has_eh == -1)
	    has_eh = (e->flags & EDGE_EH);
	  if ((e->flags & EDGE_EH) == has_eh)
	    continue;
	  error ("EH incoming edge mixed with non-EH incoming edges "
		 "in bb %i", bb->index);
	  err = 1;
	  break;
	}
    }

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.  */
  if (crtl->has_bb_partition && !err
      && current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      auto_vec<basic_block> bbs_to_fix = find_partition_fixes (true);
      err = !bbs_to_fix.is_empty ();
    }

  /* Clean up.  */
  return err;
}

/* Checks on the instructions within blocks. Currently checks that each
   block starts with a basic block note, and that basic block notes and
   control flow jumps are not found in the middle of the block.  */

static int
rtl_verify_bb_insns (void)
{
  rtx_insn *x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of the basic block.  It ought to contain
	 an optional CODE_LABEL followed by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}

/* Verify that block pointers for instructions in basic blocks, headers and
   footers are set appropriately.  */

static int
rtl_verify_bb_pointers (void)
{
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Clean up.  */
  return err;
}

/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does the following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions
   - verify that hot blocks are not dominated by cold blocks

   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  int err = 0;

  err |= rtl_verify_bb_pointers ();

  err |= rtl_verify_bb_insns ();

  err |= rtl_verify_edges ();

  return err;
}

/* Walk the instruction chain and verify that bb head/end pointers
  are correct, and that instructions are in exactly one bb and have
  correct block pointers.  */

static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  rtx_insn *last_head = get_last_insn ();
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *head = BB_HEAD (bb);
      rtx_insn *end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

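      /* Resume the outer scan just above this block's head so that the
	 insns lying between this block and the previous one are also
	 checked.  */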
      last_head = PREV_INSN (x);
    }

  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}

/* Verify that fallthru edges point to adjacent blocks in layout order and
   that barriers exist after non-fallthru blocks.  */

static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx_insn *insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx_insn *insn;

	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  return err;
}

/* Verify that blocks are laid out in consecutive order. While walking the
   instructions, verify that all expected instructions are inside the basic
   blocks, and that all returns are followed by barriers.  */

static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x, *y;
  int num_bb_notes;
  rtx_insn * const rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;

  num_bb_notes = 0;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! ((y = next_nonnote_nondebug_insn (x))
		&& BARRIER_P (y)))
	fatal_insn ("return not followed by barrier", x);

      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks_for_fn (cfun));

  return err;
}

/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL, plus consistency checks specific to linearized RTL mode.

   Currently it does the following checks:
   - all checks of rtl_verify_flow_info_1
   - test head/end pointers
   - check that blocks are laid out in consecutive order
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edges point to the adjacent blocks
   - verify that there is a single hot/cold partition boundary after bbro  */

static int
rtl_verify_flow_info (void)
{
  int err = 0;

  err |= rtl_verify_flow_info_1 ();

  err |= rtl_verify_bb_insn_chain ();

  err |= rtl_verify_fallthru ();

  err |= rtl_verify_bb_layout ();

  err |= verify_hot_cold_block_grouping ();

  return err;
}

/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx_insn *insn = BB_END (bb);
  rtx note;
  bool purged = false;
  bool found;
  edge_iterator ei;

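  /* BB_END may be a debug insn or a note; step back to the last insn
     that actually determines the block's outgoing edges.  */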
  if ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b, f;
      edge_iterator ei;

      /* We only care about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Don't let abnormal flags leak from computed jumps that have
	     been turned into simplejumps.  */
3230 
3231 	  e->flags &= ~EDGE_ABNORMAL;
3232 
3233 	  /* See if this edge is one we should keep.  */
3234 	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3235 	    /* A conditional jump can fall through into the next
3236 	       block, so we should keep the edge.  */
3237 	    {
3238 	      ei_next (&ei);
3239 	      continue;
3240 	    }
3241 	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3242 		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3243 	    /* If the destination block is the target of the jump,
3244 	       keep the edge.  */
3245 	    {
3246 	      ei_next (&ei);
3247 	      continue;
3248 	    }
3249 	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3250 		   && returnjump_p (insn))
3251 	    /* If the destination block is the exit block, and this
3252 	       instruction is a return, then keep the edge.  */
3253 	    {
3254 	      ei_next (&ei);
3255 	      continue;
3256 	    }
3257 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3258 	    /* Keep the edges that correspond to exceptions thrown by
3259 	       this instruction and rematerialize the EDGE_ABNORMAL
3260 	       flag we just cleared above.  */
3261 	    {
3262 	      e->flags |= EDGE_ABNORMAL;
3263 	      ei_next (&ei);
3264 	      continue;
3265 	    }
3266 
3267 	  /* We do not need this edge.  */
3268 	  df_set_bb_dirty (bb);
3269 	  purged = true;
3270 	  remove_edge (e);
3271 	}
3272 
3273       if (EDGE_COUNT (bb->succs) == 0 || !purged)
3274 	return purged;
3275 
3276       if (dump_file)
3277 	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3278 
3279       if (!optimize)
3280 	return purged;
3281 
3282       /* Redistribute probabilities.  */
3283       if (single_succ_p (bb))
3284 	{
3285 	  single_succ_edge (bb)->probability = profile_probability::always ();
3286 	}
3287       else
3288 	{
3289 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
3290 	  if (!note)
3291 	    return purged;
3292 
3293 	  b = BRANCH_EDGE (bb);
3294 	  f = FALLTHRU_EDGE (bb);
3295 	  b->probability = profile_probability::from_reg_br_prob_note
3296 					 (XINT (note, 0));
3297 	  f->probability = b->probability.invert ();
3298 	}
3299 
3300       return purged;
3301     }
3302   else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3303     {
3304       /* First, there should not be any EH or ABCALL edges resulting
3305 	 from non-local gotos and the like.  If there were, we shouldn't
3306 	 have created the sibcall in the first place.  Second, there
3307 	 should of course never have been a fallthru edge.  */
3308       gcc_assert (single_succ_p (bb));
3309       gcc_assert (single_succ_edge (bb)->flags
3310 		  == (EDGE_SIBCALL | EDGE_ABNORMAL));
3311 
3312       return 0;
3313     }
3314 
3315   /* If we don't see a jump insn, we don't know exactly why the block would
3316      have been broken at this point.  Look for a simple, non-fallthru edge,
3317      as these are only created by conditional branches.  If we find such an
3318      edge we know that there used to be a jump here and can then safely
3319      remove all non-fallthru edges.  */
3320   found = false;
3321   FOR_EACH_EDGE (e, ei, bb->succs)
3322     if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3323       {
3324 	found = true;
3325 	break;
3326       }
3327 
3328   if (!found)
3329     return purged;
3330 
3331   /* Remove all but the fake and fallthru edges.  The fake edge may be
3332      the only successor for this block in the case of noreturn
3333      calls.  */
3334   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3335     {
3336       if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3337 	{
3338 	  df_set_bb_dirty (bb);
3339 	  remove_edge (e);
3340 	  purged = true;
3341 	}
3342       else
3343 	ei_next (&ei);
3344     }
3345 
3346   gcc_assert (single_succ_p (bb));
3347 
3348   single_succ_edge (bb)->probability = profile_probability::always ();
3349 
3350   if (dump_file)
3351     fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3352 	     bb->index);
3353   return purged;
3354 }
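
/* An illustrative sketch (hypothetical caller, not code from this file):
   after a pass folds the conditional jump ending BB into an unconditional
   jump, purge_dead_edges removes the successor edge that can no longer be
   taken, and the single remaining edge gets profile_probability::always ():

     if (purge_dead_edges (bb))
       gcc_checking_assert (single_succ_p (bb));  */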
3355 
3356 /* Search all basic blocks for potentially dead edges and purge them.  Return
3357    true if some edge has been eliminated.  */
3358 
3359 bool
3360 purge_all_dead_edges (void)
3361 {
3362   bool purged = false;
3363   basic_block bb;
3364 
3365   FOR_EACH_BB_FN (bb, cfun)
3366     {
3367       bool purged_here = purge_dead_edges (bb);
3368 
3369       purged |= purged_here;
3370     }
3371 
3372   return purged;
3373 }
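
/* A minimal usage sketch (hypothetical pass code, not from this file):
   a pass that deletes or simplifies insns wholesale can purge every
   block at once and only run the heavier CFG cleanup when something
   actually changed:

     if (purge_all_dead_edges ())
       cleanup_cfg (0);  */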
3374 
3375 /* This is used by a few passes that emit some instructions after abnormal
3376    calls, moving the basic block's end, while they in fact want to emit
3377    them on the fallthru edge.  Look for abnormal call edges, scan backward
3378    in the block for the call, and insert the instructions on the edge instead.
3379 
3380    Similarly, handle instructions throwing exceptions internally.
3381 
3382    Return true when instructions have been found and inserted on edges.  */
3383 
3384 bool
3385 fixup_abnormal_edges (void)
3386 {
3387   bool inserted = false;
3388   basic_block bb;
3389 
3390   FOR_EACH_BB_FN (bb, cfun)
3391     {
3392       edge e;
3393       edge_iterator ei;
3394 
3395       /* Look for cases we are interested in - calls or instructions causing
3396          exceptions.  */
3397       FOR_EACH_EDGE (e, ei, bb->succs)
3398 	if ((e->flags & EDGE_ABNORMAL_CALL)
3399 	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3400 		== (EDGE_ABNORMAL | EDGE_EH)))
3401 	  break;
3402 
3403       if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3404 	{
3405 	  rtx_insn *insn;
3406 
3407 	  /* Get past the new insns generated.  Allow notes, as the insns
3408 	     may be already deleted.  */
3409 	  insn = BB_END (bb);
3410 	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3411 		 && !can_throw_internal (insn)
3412 		 && insn != BB_HEAD (bb))
3413 	    insn = PREV_INSN (insn);
3414 
3415 	  if (CALL_P (insn) || can_throw_internal (insn))
3416 	    {
3417 	      rtx_insn *stop, *next;
3418 
3419 	      e = find_fallthru_edge (bb->succs);
3420 
3421 	      stop = NEXT_INSN (BB_END (bb));
3422 	      BB_END (bb) = insn;
3423 
3424 	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3425 		{
3426 		  next = NEXT_INSN (insn);
3427 		  if (INSN_P (insn))
3428 		    {
3429 		      delete_insn (insn);
3430 
3431 		      /* Sometimes there's still the return value USE.
3432 			 If it's placed after a trapping call (i.e. that
3433 			 call is the last insn anyway), we have no fallthru
3434 			 edge.  Simply delete this use and don't try to insert
3435 			 on the non-existent edge.
3436 			 Similarly, sometimes a call that can throw is
3437 			 followed in the source with __builtin_unreachable (),
3438 			 meaning that there is UB if the call returns rather
3439 			 than throws.  If there weren't any instructions
3440 			 following such calls before, supposedly even the ones
3441 			 we've deleted aren't significant and can be
3442 			 removed.  */
3443 		      if (e)
3444 			{
3445 			  /* We're not deleting it, we're moving it.  */
3446 			  insn->set_undeleted ();
3447 			  SET_PREV_INSN (insn) = NULL_RTX;
3448 			  SET_NEXT_INSN (insn) = NULL_RTX;
3449 
3450 			  insert_insn_on_edge (insn, e);
3451 			  inserted = true;
3452 			}
3453 		    }
3454 		  else if (!BARRIER_P (insn))
3455 		    set_block_for_insn (insn, NULL);
3456 		}
3457 	    }
3458 
3459 	  /* It may be that we don't find any trapping insn.  In this
3460 	     case we discovered quite late that the insn that had been
3461 	     marked as can_throw_internal in fact couldn't trap at all.
3462 	     So we should delete the EH edges out of the block.  */
3463 	  else
3464 	    purge_dead_edges (bb);
3465 	}
3466     }
3467 
3468   return inserted;
3469 }
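
/* For example (illustrative only): if a pass emitted a stack adjustment
   after an abnormal call, leaving

     call_insn                  [abnormal call edge + fallthru edge]
     insn: sp = sp + 16         <- emitted after the call, now BB_END

   the loop above moves BB_END back to the call and re-queues the
   adjustment with insert_insn_on_edge on the fallthru edge, so that it
   is materialized on the normal return path once the caller runs
   commit_edge_insertions.  */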
3470 
3471 /* Delete the unconditional jump INSN and adjust the CFG correspondingly.
3472    Note that the INSN should be deleted *after* removing dead edges, so
3473    that the kept edge is the fallthrough edge for a (set (pc) (pc))
3474    but not for a (set (pc) (label_ref FOO)).  */
3475 
3476 void
3477 update_cfg_for_uncondjump (rtx_insn *insn)
3478 {
3479   basic_block bb = BLOCK_FOR_INSN (insn);
3480   gcc_assert (BB_END (bb) == insn);
3481 
3482   purge_dead_edges (bb);
3483 
3484   if (current_ir_type () != IR_RTL_CFGLAYOUT)
3485     {
3486       if (!find_fallthru_edge (bb->succs))
3487 	{
3488 	  auto barrier = next_nonnote_nondebug_insn (insn);
3489 	  if (!barrier || !BARRIER_P (barrier))
3490 	    emit_barrier_after (insn);
3491 	}
3492       return;
3493     }
3494 
3495   delete_insn (insn);
3496   if (EDGE_COUNT (bb->succs) == 1)
3497     {
3498       rtx_insn *insn;
3499 
3500       single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3501 
3502       /* Remove barriers from the footer if there are any.  */
3503       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
3504 	if (BARRIER_P (insn))
3505 	  {
3506 	    if (PREV_INSN (insn))
3507 	      SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
3508 	    else
3509 	      BB_FOOTER (bb) = NEXT_INSN (insn);
3510 	    if (NEXT_INSN (insn))
3511 	      SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
3512 	  }
3513 	else if (LABEL_P (insn))
3514 	  break;
3515     }
3516 }
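
/* E.g. combine uses this after an insn is simplified into an
   unconditional jump such as (set (pc) (pc)): purge_dead_edges first
   drops the edges that can no longer be taken; in cfglayout mode the
   jump itself is then deleted and any barriers are dropped from the
   footer, while outside cfglayout mode a barrier is emitted if no
   fallthru edge remains.  */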
3517 
3518 /* Cut the insns from FIRST to LAST out of the insns stream.  */
3519 
3520 rtx_insn *
3521 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3522 {
3523   rtx_insn *prevfirst = PREV_INSN (first);
3524   rtx_insn *nextlast = NEXT_INSN (last);
3525 
3526   SET_PREV_INSN (first) = NULL;
3527   SET_NEXT_INSN (last) = NULL;
3528   if (prevfirst)
3529     SET_NEXT_INSN (prevfirst) = nextlast;
3530   if (nextlast)
3531     SET_PREV_INSN (nextlast) = prevfirst;
3532   else
3533     set_last_insn (prevfirst);
3534   if (!prevfirst)
3535     set_first_insn (nextlast);
3536   return first;
3537 }
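
/* For instance, given the chain I1 <-> I2 <-> I3 <-> I4 <-> I5, the call
   unlink_insn_chain (I2, I4) links I1 directly to I5 (falling back to
   set_first_insn / set_last_insn when an end of the whole chain is cut
   away) and returns I2, the head of the detached sub-chain
   I2 <-> I3 <-> I4 whose outer PREV/NEXT pointers are now NULL.  */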
3538 
3539 /* Skip over inter-block insns occurring after BB which are typically
3540    associated with BB (e.g., barriers). If there are any such insns,
3541    we return the last one. Otherwise, we return the end of BB.  */
3542 
3543 static rtx_insn *
3544 skip_insns_after_block (basic_block bb)
3545 {
3546   rtx_insn *insn, *last_insn, *next_head, *prev;
3547 
3548   next_head = NULL;
3549   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3550     next_head = BB_HEAD (bb->next_bb);
3551 
3552   for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3553     {
3554       if (insn == next_head)
3555 	break;
3556 
3557       switch (GET_CODE (insn))
3558 	{
3559 	case BARRIER:
3560 	  last_insn = insn;
3561 	  continue;
3562 
3563 	case NOTE:
3564 	  switch (NOTE_KIND (insn))
3565 	    {
3566 	    case NOTE_INSN_BLOCK_END:
3567 	      gcc_unreachable ();
3568 	      continue;
3569 	    default:
3570 	      continue;
3571 	      break;
3572 	    }
3573 	  break;
3574 
3575 	case CODE_LABEL:
3576 	  if (NEXT_INSN (insn)
3577 	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3578 	    {
3579 	      insn = NEXT_INSN (insn);
3580 	      last_insn = insn;
3581 	      continue;
3582 	    }
3583 	  break;
3584 
3585 	default:
3586 	  break;
3587 	}
3588 
3589       break;
3590     }
3591 
3592   /* It is possible to hit a contradictory sequence.  For instance:
3593 
3594      jump_insn
3595      NOTE_INSN_BLOCK_BEG
3596      barrier
3597 
3598      where the barrier belongs to the jump_insn, but the note does not.  This
3599      can be created by removing the basic block originally following
3600      NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */
3601 
3602   for (insn = last_insn; insn != BB_END (bb); insn = prev)
3603     {
3604       prev = PREV_INSN (insn);
3605       if (NOTE_P (insn))
3606 	switch (NOTE_KIND (insn))
3607 	  {
3608 	  case NOTE_INSN_BLOCK_END:
3609 	    gcc_unreachable ();
3610 	    break;
3611 	  case NOTE_INSN_DELETED:
3612 	  case NOTE_INSN_DELETED_LABEL:
3613 	  case NOTE_INSN_DELETED_DEBUG_LABEL:
3614 	    continue;
3615 	  default:
3616 	    reorder_insns (insn, insn, last_insn);
3617 	  }
3618     }
3619 
3620   return last_insn;
3621 }
3622 
3623 /* Locate or create a label for a given basic block.  */
3624 
3625 static rtx_insn *
3626 label_for_bb (basic_block bb)
3627 {
3628   rtx_insn *label = BB_HEAD (bb);
3629 
3630   if (!LABEL_P (label))
3631     {
3632       if (dump_file)
3633 	fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3634 
3635       label = block_label (bb);
3636     }
3637 
3638   return label;
3639 }
3640 
3641 /* Locate the effective beginning and end of the insn chain for each
3642    block, as defined by skip_insns_after_block above.  */
3643 
3644 static void
3645 record_effective_endpoints (void)
3646 {
3647   rtx_insn *next_insn;
3648   basic_block bb;
3649   rtx_insn *insn;
3650 
3651   for (insn = get_insns ();
3652        insn
3653        && NOTE_P (insn)
3654        && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3655        insn = NEXT_INSN (insn))
3656     continue;
3657   /* No basic blocks at all?  */
3658   gcc_assert (insn);
3659 
3660   if (PREV_INSN (insn))
3661     cfg_layout_function_header =
3662 	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
3663   else
3664     cfg_layout_function_header = NULL;
3665 
3666   next_insn = get_insns ();
3667   FOR_EACH_BB_FN (bb, cfun)
3668     {
3669       rtx_insn *end;
3670 
3671       if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3672 	BB_HEADER (bb) = unlink_insn_chain (next_insn,
3673 						PREV_INSN (BB_HEAD (bb)));
3674       end = skip_insns_after_block (bb);
3675       if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3676 	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3677       next_insn = NEXT_INSN (BB_END (bb));
3678     }
3679 
3680   cfg_layout_function_footer = next_insn;
3681   if (cfg_layout_function_footer)
3682     cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3683 }
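
/* As an illustration, for an insn stream such as

     NOTE_INSN_DELETED          <- becomes cfg_layout_function_header
     CODE_LABEL L1              <- BB_HEAD (bb)
     insn ...
     jump_insn                  <- BB_END (bb)
     BARRIER                    <- becomes BB_FOOTER (bb)

   the leading note is detached into the function header and the trailing
   barrier into the block's footer, so that only the insns from BB_HEAD
   to BB_END stay linked into the main chain while in cfglayout mode.  */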
3684 
3685 namespace {
3686 
3687 const pass_data pass_data_into_cfg_layout_mode =
3688 {
3689   RTL_PASS, /* type */
3690   "into_cfglayout", /* name */
3691   OPTGROUP_NONE, /* optinfo_flags */
3692   TV_CFG, /* tv_id */
3693   0, /* properties_required */
3694   PROP_cfglayout, /* properties_provided */
3695   0, /* properties_destroyed */
3696   0, /* todo_flags_start */
3697   0, /* todo_flags_finish */
3698 };
3699 
3700 class pass_into_cfg_layout_mode : public rtl_opt_pass
3701 {
3702 public:
3703   pass_into_cfg_layout_mode (gcc::context *ctxt)
3704     : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3705   {}
3706 
3707   /* opt_pass methods: */
3708   virtual unsigned int execute (function *)
3709     {
3710       cfg_layout_initialize (0);
3711       return 0;
3712     }
3713 
3714 }; // class pass_into_cfg_layout_mode
3715 
3716 } // anon namespace
3717 
3718 rtl_opt_pass *
3719 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3720 {
3721   return new pass_into_cfg_layout_mode (ctxt);
3722 }
3723 
3724 namespace {
3725 
3726 const pass_data pass_data_outof_cfg_layout_mode =
3727 {
3728   RTL_PASS, /* type */
3729   "outof_cfglayout", /* name */
3730   OPTGROUP_NONE, /* optinfo_flags */
3731   TV_CFG, /* tv_id */
3732   0, /* properties_required */
3733   0, /* properties_provided */
3734   PROP_cfglayout, /* properties_destroyed */
3735   0, /* todo_flags_start */
3736   0, /* todo_flags_finish */
3737 };
3738 
3739 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3740 {
3741 public:
3742   pass_outof_cfg_layout_mode (gcc::context *ctxt)
3743     : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3744   {}
3745 
3746   /* opt_pass methods: */
3747   virtual unsigned int execute (function *);
3748 
3749 }; // class pass_outof_cfg_layout_mode
3750 
3751 unsigned int
3752 pass_outof_cfg_layout_mode::execute (function *fun)
3753 {
3754   basic_block bb;
3755 
3756   FOR_EACH_BB_FN (bb, fun)
3757     if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3758       bb->aux = bb->next_bb;
3759 
3760   cfg_layout_finalize ();
3761 
3762   return 0;
3763 }
3764 
3765 } // anon namespace
3766 
3767 rtl_opt_pass *
3768 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3769 {
3770   return new pass_outof_cfg_layout_mode (ctxt);
3771 }
3772 
3773 
3774 /* Link the basic blocks in the correct order, compacting the basic
3775    block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
3776    function also clears the basic block header and footer fields.
3777 
3778    This function is usually called after a pass (e.g. tracer) finishes
3779    some transformations while in cfglayout mode.  The required sequence
3780    of the basic blocks is in a linked list along the bb->aux field.
3781    This functions re-links the basic block prev_bb and next_bb pointers
3782    accordingly, and it compacts and renumbers the blocks.
3783 
3784    FIXME: This currently works only for RTL, but the only RTL-specific
3785    bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
3786    to GIMPLE a long time ago, but it doesn't relink the basic block
3787    chain.  It could do that (to give better initial RTL) if this function
3788    is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */
3789 
3790 void
3791 relink_block_chain (bool stay_in_cfglayout_mode)
3792 {
3793   basic_block bb, prev_bb;
3794   int index;
3795 
3796   /* Maybe dump the re-ordered sequence.  */
3797   if (dump_file)
3798     {
3799       fprintf (dump_file, "Reordered sequence:\n");
3800       for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3801 	   NUM_FIXED_BLOCKS;
3802 	   bb;
3803 	   bb = (basic_block) bb->aux, index++)
3804 	{
3805 	  fprintf (dump_file, " %i ", index);
3806 	  if (get_bb_original (bb))
3807 	    fprintf (dump_file, "duplicate of %i\n",
3808 		     get_bb_original (bb)->index);
3809 	  else if (forwarder_block_p (bb)
3810 		   && !LABEL_P (BB_HEAD (bb)))
3811 	    fprintf (dump_file, "compensation\n");
3812 	  else
3813 	    fprintf (dump_file, "bb %i\n", bb->index);
3814 	}
3815     }
3816 
3817   /* Now reorder the blocks.  */
3818   prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3819   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3820   for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3821     {
3822       bb->prev_bb = prev_bb;
3823       prev_bb->next_bb = bb;
3824     }
3825   prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3826   EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3827 
3828   /* Then, clean up the aux fields.  */
3829   FOR_ALL_BB_FN (bb, cfun)
3830     {
3831       bb->aux = NULL;
3832       if (!stay_in_cfglayout_mode)
3833 	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3834     }
3835 
3836   /* Maybe reset the original copy tables; they are not valid anymore
3837      when we renumber the basic blocks in compact_blocks.  If we are
3838      going out of cfglayout mode, don't re-allocate the tables.  */
3839   if (original_copy_tables_initialized_p ())
3840     free_original_copy_tables ();
3841   if (stay_in_cfglayout_mode)
3842     initialize_original_copy_tables ();
3843 
3844   /* Finally, put basic_block_info in the new order.  */
3845   compact_blocks ();
3846 }
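
/* A minimal sketch of the calling convention (hypothetical pass code):
   the pass threads its desired block order through the aux fields,
   starting from the entry block's successor, then relinks:

     FOR_EACH_BB_FN (bb, cfun)
       if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	 bb->aux = bb->next_bb;        <- here: keep the current order
     relink_block_chain (true);        <- true: stay in cfglayout mode

   which mirrors what pass_outof_cfg_layout_mode::execute above does
   before cfg_layout_finalize.  */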
3847 
3848 
3849 /* Given a reorder chain, rearrange the code to match.  */
3850 
3851 static void
3852 fixup_reorder_chain (void)
3853 {
3854   basic_block bb;
3855   rtx_insn *insn = NULL;
3856 
3857   if (cfg_layout_function_header)
3858     {
3859       set_first_insn (cfg_layout_function_header);
3860       insn = cfg_layout_function_header;
3861       while (NEXT_INSN (insn))
3862 	insn = NEXT_INSN (insn);
3863     }
3864 
3865   /* First do the bulk reordering -- rechain the blocks without regard to
3866      the needed changes to jumps and labels.  */
3867 
3868   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3869        bb->aux)
3870     {
3871       if (BB_HEADER (bb))
3872 	{
3873 	  if (insn)
3874 	    SET_NEXT_INSN (insn) = BB_HEADER (bb);
3875 	  else
3876 	    set_first_insn (BB_HEADER (bb));
3877 	  SET_PREV_INSN (BB_HEADER (bb)) = insn;
3878 	  insn = BB_HEADER (bb);
3879 	  while (NEXT_INSN (insn))
3880 	    insn = NEXT_INSN (insn);
3881 	}
3882       if (insn)
3883 	SET_NEXT_INSN (insn) = BB_HEAD (bb);
3884       else
3885 	set_first_insn (BB_HEAD (bb));
3886       SET_PREV_INSN (BB_HEAD (bb)) = insn;
3887       insn = BB_END (bb);
3888       if (BB_FOOTER (bb))
3889 	{
3890 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3891 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3892 	  while (NEXT_INSN (insn))
3893 	    insn = NEXT_INSN (insn);
3894 	}
3895     }
3896 
3897   SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3898   if (cfg_layout_function_footer)
3899     SET_PREV_INSN (cfg_layout_function_footer) = insn;
3900 
3901   while (NEXT_INSN (insn))
3902     insn = NEXT_INSN (insn);
3903 
3904   set_last_insn (insn);
3905   if (flag_checking)
3906     verify_insn_chain ();
3907 
3908   /* Now add jumps and labels as needed to match the blocks' new
3909      outgoing edges.  */
3910 
3911   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3912        bb->aux)
3913     {
3914       edge e_fall, e_taken, e;
3915       rtx_insn *bb_end_insn;
3916       rtx ret_label = NULL_RTX;
3917       basic_block nb;
3918       edge_iterator ei;
3919 
3920       if (EDGE_COUNT (bb->succs) == 0)
3921 	continue;
3922 
3923       /* Find the old fallthru edge, and another non-EH edge for
3924 	 a taken jump.  */
3925       e_taken = e_fall = NULL;
3926 
3927       FOR_EACH_EDGE (e, ei, bb->succs)
3928 	if (e->flags & EDGE_FALLTHRU)
3929 	  e_fall = e;
3930 	else if (! (e->flags & EDGE_EH))
3931 	  e_taken = e;
3932 
3933       bb_end_insn = BB_END (bb);
3934       if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
3935 	{
3936 	  ret_label = JUMP_LABEL (bb_end_jump);
3937 	  if (any_condjump_p (bb_end_jump))
3938 	    {
3939 	      /* This might happen if the conditional jump has side
3940 		 effects and could therefore not be optimized away.
3941 		 Make the basic block end with a barrier in order
3942 		 to prevent rtl_verify_flow_info from complaining.  */
3943 	      if (!e_fall)
3944 		{
3945 		  gcc_assert (!onlyjump_p (bb_end_jump)
3946 			      || returnjump_p (bb_end_jump)
3947                               || (e_taken->flags & EDGE_CROSSING));
3948 		  emit_barrier_after (bb_end_jump);
3949 		  continue;
3950 		}
3951 
3952 	      /* If the old fallthru is still next, nothing to do.  */
3953 	      if (bb->aux == e_fall->dest
3954 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3955 		continue;
3956 
3957 	      /* The degenerate case of a conditional jump jumping to the next
3958 		 instruction can happen for jumps with side effects.  We need
3959 		 to construct a forwarder block and this will be done just
3960 		 fine by force_nonfallthru below.  */
3961 	      if (!e_taken)
3962 		;
3963 
3964 	      /* There is another special case: if *neither* block is next,
3965 		 such as happens at the very end of a function, then we'll
3966 		 need to add a new unconditional jump.  Choose the taken
3967 		 edge based on known or assumed probability.  */
3968 	      else if (bb->aux != e_taken->dest)
3969 		{
3970 		  rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
3971 
3972 		  if (note
3973 		      && profile_probability::from_reg_br_prob_note
3974 				 (XINT (note, 0)) < profile_probability::even ()
3975 		      && invert_jump (bb_end_jump,
3976 				      (e_fall->dest
3977 				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
3978 				       ? NULL_RTX
3979 				       : label_for_bb (e_fall->dest)), 0))
3980 		    {
3981 		      e_fall->flags &= ~EDGE_FALLTHRU;
3982 		      gcc_checking_assert (could_fall_through
3983 					   (e_taken->src, e_taken->dest));
3984 		      e_taken->flags |= EDGE_FALLTHRU;
3985 		      update_br_prob_note (bb);
3986 		      e = e_fall, e_fall = e_taken, e_taken = e;
3987 		    }
3988 		}
3989 
3990 	      /* If the "jumping" edge is a crossing edge, and the fall
3991 		 through edge is non-crossing, leave things as they are.  */
3992 	      else if ((e_taken->flags & EDGE_CROSSING)
3993 		       && !(e_fall->flags & EDGE_CROSSING))
3994 		continue;
3995 
3996 	      /* Otherwise we can try to invert the jump.  This will
3997 		 basically never fail, however, keep up the pretense.  */
3998 	      else if (invert_jump (bb_end_jump,
3999 				    (e_fall->dest
4000 				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
4001 				     ? NULL_RTX
4002 				     : label_for_bb (e_fall->dest)), 0))
4003 		{
4004 		  e_fall->flags &= ~EDGE_FALLTHRU;
4005 		  gcc_checking_assert (could_fall_through
4006 				       (e_taken->src, e_taken->dest));
4007 		  e_taken->flags |= EDGE_FALLTHRU;
4008 		  update_br_prob_note (bb);
4009 		  if (LABEL_NUSES (ret_label) == 0
4010 		      && single_pred_p (e_taken->dest))
4011 		    delete_insn (as_a<rtx_insn *> (ret_label));
4012 		  continue;
4013 		}
4014 	    }
4015 	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
4016 	    {
4017 	      /* If the old fallthru is still next or if
4018 		 asm goto doesn't have a fallthru (e.g. when followed by
4019 		 __builtin_unreachable ()), nothing to do.  */
4020 	      if (! e_fall
4021 		  || bb->aux == e_fall->dest
4022 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4023 		continue;
4024 
4025 	      /* Otherwise we'll have to use the fallthru fixup below.  */
4026 	    }
4027 	  else
4028 	    {
4029 	      /* Otherwise we have some return, switch or computed
4030 		 jump.  In the 99% case, there should not have been a
4031 		 fallthru edge.  */
4032 	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
4033 	      continue;
4034 	    }
4035 	}
4036       else
4037 	{
4038 	  /* No fallthru implies a noreturn function with EH edges, or
4039 	     something similarly bizarre.  In any case, we don't need to
4040 	     do anything.  */
4041 	  if (! e_fall)
4042 	    continue;
4043 
4044 	  /* If the fallthru block is still next, nothing to do.  */
4045 	  if (bb->aux == e_fall->dest)
4046 	    continue;
4047 
4048 	  /* A fallthru to exit block.  */
4049 	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4050 	    continue;
4051 	}
4052 
4053       /* We got here if we need to add a new jump insn.
4054 	 Note force_nonfallthru can delete E_FALL and thus we have to
4055 	 save E_FALL->src prior to the call to force_nonfallthru.  */
4056       nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
4057       if (nb)
4058 	{
4059 	  nb->aux = bb->aux;
4060 	  bb->aux = nb;
4061 	  /* Don't process this new block.  */
4062 	  bb = nb;
4063 	}
4064     }
4065 
4066   relink_block_chain (/*stay_in_cfglayout_mode=*/false);
4067 
4068   /* Annoying special case - jump around dead jumptables left in the code.  */
4069   FOR_EACH_BB_FN (bb, cfun)
4070     {
4071       edge e = find_fallthru_edge (bb->succs);
4072 
4073       if (e && !can_fallthru (e->src, e->dest))
4074 	force_nonfallthru (e);
4075     }
4076 
4077   /* Ensure goto_locus from edges has some instructions with that locus in RTL
4078      when not optimizing.  */
4079   if (!optimize && !DECL_IGNORED_P (current_function_decl))
4080     FOR_EACH_BB_FN (bb, cfun)
4081       {
4082         edge e;
4083         edge_iterator ei;
4084 
4085         FOR_EACH_EDGE (e, ei, bb->succs)
4086 	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
4087 	      && !(e->flags & EDGE_ABNORMAL))
4088 	    {
4089 	      edge e2;
4090 	      edge_iterator ei2;
4091 	      basic_block dest, nb;
4092 	      rtx_insn *end;
4093 
4094 	      insn = BB_END (e->src);
4095 	      end = PREV_INSN (BB_HEAD (e->src));
4096 	      while (insn != end
4097 		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
4098 		insn = PREV_INSN (insn);
4099 	      if (insn != end
4100 		  && INSN_LOCATION (insn) == e->goto_locus)
4101 		continue;
4102 	      if (simplejump_p (BB_END (e->src))
4103 		  && !INSN_HAS_LOCATION (BB_END (e->src)))
4104 		{
4105 		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
4106 		  continue;
4107 		}
4108 	      dest = e->dest;
4109 	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4110 		{
4111 		  /* Non-fallthru edges to the exit block cannot be split.  */
4112 		  if (!(e->flags & EDGE_FALLTHRU))
4113 		    continue;
4114 		}
4115 	      else
4116 		{
4117 		  insn = BB_HEAD (dest);
4118 		  end = NEXT_INSN (BB_END (dest));
4119 		  while (insn != end && !NONDEBUG_INSN_P (insn))
4120 		    insn = NEXT_INSN (insn);
4121 		  if (insn != end && INSN_HAS_LOCATION (insn)
4122 		      && INSN_LOCATION (insn) == e->goto_locus)
4123 		    continue;
4124 		}
4125 	      nb = split_edge (e);
4126 	      if (!INSN_P (BB_END (nb)))
4127 		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
4128 							 nb);
4129 	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;
4130 
4131 	      /* If there are other incoming edges to the destination block
4132 		 with the same goto locus, redirect them to the new block as
4133 		 well, this can prevent other such blocks from being created
4134 		 in subsequent iterations of the loop.  */
4135 	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
4136 		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
4137 		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
4138 		    && e->goto_locus == e2->goto_locus)
4139 		  redirect_edge_and_branch (e2, nb);
4140 		else
4141 		  ei_next (&ei2);
4142 	    }
4143       }
4144 }
4145 
4146 /* Perform sanity checks on the insn chain.
4147    1. Check that next/prev pointers are consistent in both the forward and
4148       reverse direction.
4149    2. Count insns in chain, going both directions, and check if equal.
4150    3. Check that get_last_insn () returns the actual end of chain.  */
4151 
4152 DEBUG_FUNCTION void
4153 verify_insn_chain (void)
4154 {
4155   rtx_insn *x, *prevx, *nextx;
4156   int insn_cnt1, insn_cnt2;
4157 
4158   for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4159        x != 0;
4160        prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4161     gcc_assert (PREV_INSN (x) == prevx);
4162 
4163   gcc_assert (prevx == get_last_insn ());
4164 
4165   for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4166        x != 0;
4167        nextx = x, insn_cnt2++, x = PREV_INSN (x))
4168     gcc_assert (NEXT_INSN (x) == nextx);
4169 
4170   gcc_assert (insn_cnt1 == insn_cnt2);
4171 }
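
/* In other words, the doubly-linked-list invariant checked above is:
   for every insn X, PREV_INSN (NEXT_INSN (X)) == X whenever
   NEXT_INSN (X) is non-NULL, and get_insns () / get_last_insn () are
   exactly the two ends reached by walking the chain in either
   direction.  */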
4172 
4173 /* If we have assembler epilogues, the block falling through to exit must
4174    be the last one in the reordered chain when we reach final.  Ensure
4175    that this condition is met.  */
4176 static void
4177 fixup_fallthru_exit_predecessor (void)
4178 {
4179   edge e;
4180   basic_block bb = NULL;
4181 
4182   /* This transformation is not valid before reload, because we might
4183      separate a call from the instruction that copies the return
4184      value.  */
4185   gcc_assert (reload_completed);
4186 
4187   e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4188   if (e)
4189     bb = e->src;
4190 
4191   if (bb && bb->aux)
4192     {
4193       basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4194 
4195       /* If the very first block is the one with the fall-through exit
4196 	 edge, we have to split that block.  */
4197       if (c == bb)
4198 	{
4199 	  bb = split_block_after_labels (bb)->dest;
4200 	  bb->aux = c->aux;
4201 	  c->aux = bb;
4202 	  BB_FOOTER (bb) = BB_FOOTER (c);
4203 	  BB_FOOTER (c) = NULL;
4204 	}
4205 
4206       while (c->aux != bb)
4207 	c = (basic_block) c->aux;
4208 
4209       c->aux = bb->aux;
4210       while (c->aux)
4211 	c = (basic_block) c->aux;
4212 
4213       c->aux = bb;
4214       bb->aux = NULL;
4215     }
4216 }
4217 
4218 /* In case the exit block has more than one fallthru predecessor, force
4219    it to have only one.  */
4220 
4221 static void
4222 force_one_exit_fallthru (void)
4223 {
4224   edge e, predecessor = NULL;
4225   bool more = false;
4226   edge_iterator ei;
4227   basic_block forwarder, bb;
4228 
4229   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4230     if (e->flags & EDGE_FALLTHRU)
4231       {
4232 	if (predecessor == NULL)
4233 	  predecessor = e;
4234 	else
4235 	  {
4236 	    more = true;
4237 	    break;
4238 	  }
4239       }
4240 
4241   if (!more)
4242     return;
4243 
4244   /* Exit has several fallthru predecessors.  Create a forwarder block for
4245      them.  */
4246   forwarder = split_edge (predecessor);
4247   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4248        (e = ei_safe_edge (ei)); )
4249     {
4250       if (e->src == forwarder
4251 	  || !(e->flags & EDGE_FALLTHRU))
4252 	ei_next (&ei);
4253       else
4254 	redirect_edge_and_branch_force (e, forwarder);
4255     }
4256 
4257   /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4258      exit block.  */
4259   FOR_EACH_BB_FN (bb, cfun)
4260     {
4261       if (bb->aux == NULL && bb != forwarder)
4262 	{
4263 	  bb->aux = forwarder;
4264 	  break;
4265 	}
4266     }
4267 }
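
/* Illustration: when the exit block has two fallthru predecessors,

     B1 --fallthru--> EXIT
     B2 --fallthru--> EXIT

   the first such edge is split into a forwarder block F and the other
   fallthru edge is redirected to F (gaining a jump if needed), leaving
   a single fallthru predecessor of the exit block:

     B1 --fallthru--> F --fallthru--> EXIT
     B2 ----jump----> F  */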
4268 
4269 /* Return true in case it is possible to duplicate the basic block BB.  */
4270 
4271 static bool
4272 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4273 {
4274   /* Do not attempt to duplicate tablejumps, as we need to unshare
4275      the dispatch table.  This is difficult to do, as the instructions
4276      computing the jump destination may be hoisted outside the basic block.  */
4277   if (tablejump_p (BB_END (bb), NULL, NULL))
4278     return false;
4279 
4280   /* Do not duplicate blocks containing insns that can't be copied.  */
4281   if (targetm.cannot_copy_insn_p)
4282     {
4283       rtx_insn *insn = BB_HEAD (bb);
4284       while (1)
4285 	{
4286 	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4287 	    return false;
4288 	  if (insn == BB_END (bb))
4289 	    break;
4290 	  insn = NEXT_INSN (insn);
4291 	}
4292     }
4293 
4294   return true;
4295 }
4296 
4297 rtx_insn *
4298 duplicate_insn_chain (rtx_insn *from, rtx_insn *to,
4299 		      class loop *loop, copy_bb_data *id)
4300 {
4301   rtx_insn *insn, *next, *copy;
4302   rtx_note *last;
4303 
4304   /* Avoid updating of boundaries of previous basic block.  The
4305      note will get removed from insn stream in fixup.  */
4306   last = emit_note (NOTE_INSN_DELETED);
4307 
4308   /* Create copy at the end of INSN chain.  The chain will
4309      be reordered later.  */
4310   for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4311     {
4312       switch (GET_CODE (insn))
4313 	{
4314 	case DEBUG_INSN:
4315 	  /* Don't duplicate label debug insns.  */
4316 	  if (DEBUG_BIND_INSN_P (insn)
4317 	      && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4318 	    break;
4319 	  /* FALLTHRU */
4320 	case INSN:
4321 	case CALL_INSN:
4322 	case JUMP_INSN:
4323 	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
4324 	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4325 	      && ANY_RETURN_P (JUMP_LABEL (insn)))
4326 	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
4327           maybe_copy_prologue_epilogue_insn (insn, copy);
4328 	  /* If requested remap dependence info of cliques brought in
4329 	     via inlining.  */
4330 	  if (id)
4331 	    {
4332 	      subrtx_iterator::array_type array;
4333 	      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4334 		if (MEM_P (*iter) && MEM_EXPR (*iter))
4335 		  {
4336 		    tree op = MEM_EXPR (*iter);
4337 		    if (TREE_CODE (op) == WITH_SIZE_EXPR)
4338 		      op = TREE_OPERAND (op, 0);
4339 		    while (handled_component_p (op))
4340 		      op = TREE_OPERAND (op, 0);
4341 		    if ((TREE_CODE (op) == MEM_REF
4342 			 || TREE_CODE (op) == TARGET_MEM_REF)
4343 			&& MR_DEPENDENCE_CLIQUE (op) > 1
4344 			&& (!loop
4345 			    || (MR_DEPENDENCE_CLIQUE (op)
4346 				!= loop->owned_clique)))
4347 		      {
4348 			if (!id->dependence_map)
4349 			  id->dependence_map = new hash_map<dependence_hash,
4350 			      unsigned short>;
4351 			bool existed;
4352 			unsigned short &newc = id->dependence_map->get_or_insert
4353 					 (MR_DEPENDENCE_CLIQUE (op), &existed);
4354 			if (!existed)
4355 			  {
4356 			    gcc_assert
4357 			      (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
4358 			    newc = ++cfun->last_clique;
4359 			  }
4360 			/* We cannot adjust MR_DEPENDENCE_CLIQUE in-place
4361 			   since MEM_EXPR is shared so make a copy and
4362 			   walk to the subtree again.  */
4363 			tree new_expr = unshare_expr (MEM_EXPR (*iter));
4364 			if (TREE_CODE (new_expr) == WITH_SIZE_EXPR)
4365 			  new_expr = TREE_OPERAND (new_expr, 0);
4366 			while (handled_component_p (new_expr))
4367 			  new_expr = TREE_OPERAND (new_expr, 0);
4368 			MR_DEPENDENCE_CLIQUE (new_expr) = newc;
4369 			set_mem_expr (const_cast <rtx> (*iter), new_expr);
4370 		      }
4371 		  }
4372 	    }
4373 	  break;
4374 
4375 	case JUMP_TABLE_DATA:
4376 	  /* Avoid copying of dispatch tables.  We never duplicate
4377 	     tablejumps, so this can hit only in case the table got
4378 	     moved far from original jump.
4379 	     Avoid copying following barrier as well if any
4380 	     (and debug insns in between).  */
4381 	  for (next = NEXT_INSN (insn);
4382 	       next != NEXT_INSN (to);
4383 	       next = NEXT_INSN (next))
4384 	    if (!DEBUG_INSN_P (next))
4385 	      break;
4386 	  if (next != NEXT_INSN (to) && BARRIER_P (next))
4387 	    insn = next;
4388 	  break;
4389 
4390 	case CODE_LABEL:
4391 	  break;
4392 
4393 	case BARRIER:
4394 	  emit_barrier ();
4395 	  break;
4396 
4397 	case NOTE:
4398 	  switch (NOTE_KIND (insn))
4399 	    {
4400 	      /* In case the prologue is empty and the function contains a
4401 		 label in its first BB, we may want to copy the block.  */
4402 	    case NOTE_INSN_PROLOGUE_END:
4403 
4404 	    case NOTE_INSN_DELETED:
4405 	    case NOTE_INSN_DELETED_LABEL:
4406 	    case NOTE_INSN_DELETED_DEBUG_LABEL:
4407 	      /* No problem to strip these.  */
4408 	    case NOTE_INSN_FUNCTION_BEG:
4409 	      /* There is always just a single entry to the function.  */
4410 	    case NOTE_INSN_BASIC_BLOCK:
4411               /* We should only switch text sections once.  */
4412 	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4413 	      break;
4414 
4415 	    case NOTE_INSN_EPILOGUE_BEG:
4416 	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
4417 	      emit_note_copy (as_a <rtx_note *> (insn));
4418 	      break;
4419 
4420 	    default:
4421 	      /* All other notes should have already been eliminated.  */
4422 	      gcc_unreachable ();
4423 	    }
4424 	  break;
4425 	default:
4426 	  gcc_unreachable ();
4427 	}
4428     }
4429   insn = NEXT_INSN (last);
4430   delete_insn (last);
4431   return insn;
4432 }
4433 
4434 /* Create a duplicate of the basic block BB.  */
4435 
4436 static basic_block
4437 cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *id)
4438 {
4439   rtx_insn *insn;
4440   basic_block new_bb;
4441 
4442   class loop *loop = (id && current_loops) ? bb->loop_father : NULL;
4443 
4444   insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb), loop, id);
4445   new_bb = create_basic_block (insn,
4446 			       insn ? get_last_insn () : NULL,
4447 			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4448 
4449   BB_COPY_PARTITION (new_bb, bb);
4450   if (BB_HEADER (bb))
4451     {
4452       insn = BB_HEADER (bb);
4453       while (NEXT_INSN (insn))
4454 	insn = NEXT_INSN (insn);
4455       insn = duplicate_insn_chain (BB_HEADER (bb), insn, loop, id);
4456       if (insn)
4457 	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4458     }
4459 
4460   if (BB_FOOTER (bb))
4461     {
4462       insn = BB_FOOTER (bb);
4463       while (NEXT_INSN (insn))
4464 	insn = NEXT_INSN (insn);
4465       insn = duplicate_insn_chain (BB_FOOTER (bb), insn, loop, id);
4466       if (insn)
4467 	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4468     }
4469 
4470   return new_bb;
4471 }
4472 
4473 
4474 /* Main entry point to this module - initialize the datastructures for
4475    CFG layout changes.  It keeps LOOPS up-to-date if not null.
4476 
4477    FLAGS is a set of additional flags to pass to cleanup_cfg().  */
4478 
4479 void
4480 cfg_layout_initialize (int flags)
4481 {
4482   rtx_insn_list *x;
4483   basic_block bb;
4484 
4485   /* Once bb partitioning is complete, cfg layout mode should not be
4486      re-entered.  Entering cfg layout mode may require fixups.  For
4487      example, edge forwarding performed when optimizing the cfg
4488      layout could require moving a block from the hot to the cold
4489      section, which would create an illegal partitioning unless some
4490      manual fixup was performed.  */
4491   gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
4492 
4493   initialize_original_copy_tables ();
4494 
4495   cfg_layout_rtl_register_cfg_hooks ();
4496 
4497   record_effective_endpoints ();
4498 
4499   /* Make sure that the targets of non local gotos are marked.  */
4500   for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4501     {
4502       bb = BLOCK_FOR_INSN (x->insn ());
4503       bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4504     }
4505 
4506   cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4507 }
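
/* A typical bracketing, modelled on the into_cfglayout/outof_cfglayout
   passes above (sketch only):

     cfg_layout_initialize (0);
     ... transform the CFG without maintaining the insn chain order ...
     FOR_EACH_BB_FN (bb, cfun)
       if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	 bb->aux = bb->next_bb;       <- desired final block order
     cfg_layout_finalize ();  */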
4508 
4509 /* Splits superblocks.  */
4510 void
4511 break_superblocks (void)
4512 {
4513   bool need = false;
4514   basic_block bb;
4515 
4516   auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
4517   bitmap_clear (superblocks);
4518 
4519   FOR_EACH_BB_FN (bb, cfun)
4520     if (bb->flags & BB_SUPERBLOCK)
4521       {
4522 	bb->flags &= ~BB_SUPERBLOCK;
4523 	bitmap_set_bit (superblocks, bb->index);
4524 	need = true;
4525       }
4526 
4527   if (need)
4528     {
4529       rebuild_jump_labels (get_insns ());
4530       find_many_sub_basic_blocks (superblocks);
4531     }
4532 }
4533 
4534 /* Finalize the changes: reorder insn list according to the sequence specified
4535    by aux pointers, enter compensation code, rebuild scope forest.  */
4536 
4537 void
4538 cfg_layout_finalize (void)
4539 {
4540   free_dominance_info (CDI_DOMINATORS);
4541   force_one_exit_fallthru ();
4542   rtl_register_cfg_hooks ();
4543   if (reload_completed && !targetm.have_epilogue ())
4544     fixup_fallthru_exit_predecessor ();
4545   fixup_reorder_chain ();
4546 
4547   rebuild_jump_labels (get_insns ());
4548   delete_dead_jumptables ();
4549 
4550   if (flag_checking)
4551     verify_insn_chain ();
4552   checking_verify_flow_info ();
4553 }
4554 
4555 
4556 /* Same as split_block but update cfg_layout structures.  */
4557 
4558 static basic_block
4559 cfg_layout_split_block (basic_block bb, void *insnp)
4560 {
4561   rtx insn = (rtx) insnp;
4562   basic_block new_bb = rtl_split_block (bb, insn);
4563 
4564   BB_FOOTER (new_bb) = BB_FOOTER (bb);
4565   BB_FOOTER (bb) = NULL;
4566 
4567   return new_bb;
4568 }
4569 
4570 /* Redirect Edge to DEST.  */
4571 static edge
4572 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4573 {
4574   basic_block src = e->src;
4575   edge ret;
4576 
4577   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4578     return NULL;
4579 
4580   if (e->dest == dest)
4581     return e;
4582 
4583   if (e->flags & EDGE_CROSSING
4584       && BB_PARTITION (e->src) == BB_PARTITION (dest)
4585       && simplejump_p (BB_END (src)))
4586     {
4587       if (dump_file)
4588 	fprintf (dump_file,
4589 	  	 "Removing crossing jump while redirecting edge form %i to %i\n",
4590 		 e->src->index, dest->index);
4591       delete_insn (BB_END (src));
4592       remove_barriers_from_footer (src);
4593       e->flags |= EDGE_FALLTHRU;
4594     }
4595 
4596   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4597       && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4598     {
4599       df_set_bb_dirty (src);
4600       return ret;
4601     }
4602 
4603   if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4604       && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4605     {
4606       if (dump_file)
4607 	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4608 		 e->src->index, dest->index);
4609 
4610       df_set_bb_dirty (e->src);
4611       redirect_edge_succ (e, dest);
4612       return e;
4613     }
4614 
4615   /* redirect_edge_and_branch may decide to turn the branch into a fallthru
4616      edge in the case the basic blocks appear to be in sequence.  Avoid this
4617      transformation.  */
4618 
4619   if (e->flags & EDGE_FALLTHRU)
4620     {
4621       /* Redirect any branch edges unified with the fallthru one.  */
4622       if (JUMP_P (BB_END (src))
4623 	  && label_is_jump_target_p (BB_HEAD (e->dest),
4624 				     BB_END (src)))
4625 	{
4626 	  edge redirected;
4627 
4628 	  if (dump_file)
4629 	    fprintf (dump_file, "Fallthru edge unified with branch "
4630 		     "%i->%i redirected to %i\n",
4631 		     e->src->index, e->dest->index, dest->index);
4632 	  e->flags &= ~EDGE_FALLTHRU;
4633 	  redirected = redirect_branch_edge (e, dest);
4634 	  gcc_assert (redirected);
4635 	  redirected->flags |= EDGE_FALLTHRU;
4636 	  df_set_bb_dirty (redirected->src);
4637 	  return redirected;
4638 	}
4639       /* In case we are redirecting fallthru edge to the branch edge
4640 	 of conditional jump, remove it.  */
4641       if (EDGE_COUNT (src->succs) == 2)
4642 	{
4643 	  /* Find the edge that is different from E.  */
4644 	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4645 
4646 	  if (s->dest == dest
4647 	      && any_condjump_p (BB_END (src))
4648 	      && onlyjump_p (BB_END (src)))
4649 	    delete_insn (BB_END (src));
4650 	}
4651       if (dump_file)
4652 	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4653 		 e->src->index, e->dest->index, dest->index);
4654       ret = redirect_edge_succ_nodup (e, dest);
4655     }
4656   else
4657     ret = redirect_branch_edge (e, dest);
4658 
4659   if (!ret)
4660     return NULL;
4661 
4662   fixup_partition_crossing (ret);
4663   /* We don't want simplejumps in the insn stream during cfglayout.  */
4664   gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
4665 
4666   df_set_bb_dirty (src);
4667   return ret;
4668 }
4669 
4670 /* Simple wrapper as we always can redirect fallthru edges.  */
4671 static basic_block
4672 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4673 {
4674   edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4675 
4676   gcc_assert (redirected);
4677   return NULL;
4678 }
4679 
4680 /* Same as delete_basic_block but update cfg_layout structures.  */
4681 
4682 static void
4683 cfg_layout_delete_block (basic_block bb)
4684 {
4685   rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remnants;
4686   rtx_insn **to;
4687 
4688   if (BB_HEADER (bb))
4689     {
4690       next = BB_HEAD (bb);
4691       if (prev)
4692 	SET_NEXT_INSN (prev) = BB_HEADER (bb);
4693       else
4694 	set_first_insn (BB_HEADER (bb));
4695       SET_PREV_INSN (BB_HEADER (bb)) = prev;
4696       insn = BB_HEADER (bb);
4697       while (NEXT_INSN (insn))
4698 	insn = NEXT_INSN (insn);
4699       SET_NEXT_INSN (insn) = next;
4700       SET_PREV_INSN (next) = insn;
4701     }
4702   next = NEXT_INSN (BB_END (bb));
4703   if (BB_FOOTER (bb))
4704     {
4705       insn = BB_FOOTER (bb);
4706       while (insn)
4707 	{
4708 	  if (BARRIER_P (insn))
4709 	    {
4710 	      if (PREV_INSN (insn))
4711 		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4712 	      else
4713 		BB_FOOTER (bb) = NEXT_INSN (insn);
4714 	      if (NEXT_INSN (insn))
4715 		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4716 	    }
4717 	  if (LABEL_P (insn))
4718 	    break;
4719 	  insn = NEXT_INSN (insn);
4720 	}
4721       if (BB_FOOTER (bb))
4722 	{
4723 	  insn = BB_END (bb);
4724 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4725 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4726 	  while (NEXT_INSN (insn))
4727 	    insn = NEXT_INSN (insn);
4728 	  SET_NEXT_INSN (insn) = next;
4729 	  if (next)
4730 	    SET_PREV_INSN (next) = insn;
4731 	  else
4732 	    set_last_insn (insn);
4733 	}
4734     }
4735   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4736     to = &BB_HEADER (bb->next_bb);
4737   else
4738     to = &cfg_layout_function_footer;
4739 
4740   rtl_delete_block (bb);
4741 
4742   if (prev)
4743     prev = NEXT_INSN (prev);
4744   else
4745     prev = get_insns ();
4746   if (next)
4747     next = PREV_INSN (next);
4748   else
4749     next = get_last_insn ();
4750 
4751   if (next && NEXT_INSN (next) != prev)
4752     {
4753       remnants = unlink_insn_chain (prev, next);
4754       insn = remnants;
4755       while (NEXT_INSN (insn))
4756 	insn = NEXT_INSN (insn);
4757       SET_NEXT_INSN (insn) = *to;
4758       if (*to)
4759 	SET_PREV_INSN (*to) = insn;
4760       *to = remnants;
4761     }
4762 }
4763 
4764 /* Return true when blocks A and B can be safely merged.  */
4765 
4766 static bool
4767 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4768 {
4769   /* If we are partitioning hot/cold basic blocks, we don't want to
4770      mess up unconditional or indirect jumps that cross between hot
4771      and cold sections.
4772 
4773      Basic block partitioning may result in some jumps that appear to
4774      be optimizable (or blocks that appear to be mergeable), but which really
4775      must be left untouched (they are required to make it safely across
4776      partition boundaries).  See  the comments at the top of
4777      bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
4778 
4779   if (BB_PARTITION (a) != BB_PARTITION (b))
4780     return false;
4781 
4782   /* Protect the loop latches.  */
4783   if (current_loops && b->loop_father->latch == b)
4784     return false;
4785 
4786   /* If we would end up moving B's instructions, make sure it doesn't fall
4787      through into the exit block, since we cannot recover from a fallthrough
4788      edge into the exit block occurring in the middle of a function.  */
4789   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4790     {
4791       edge e = find_fallthru_edge (b->succs);
4792       if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4793 	return false;
4794     }
4795 
4796   /* There must be exactly one edge in between the blocks.  */
4797   return (single_succ_p (a)
4798 	  && single_succ (a) == b
4799 	  && single_pred_p (b) == 1
4800 	  && a != b
4801 	  /* Must be simple edge.  */
4802 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4803 	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4804 	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4805 	  /* If the jump insn has side effects, we can't kill the edge.
4806 	     When not optimizing, try_redirect_by_replacing_jump will
4807 	     not allow us to redirect an edge by replacing a table jump.  */
4808 	  && (!JUMP_P (BB_END (a))
4809 	      || ((!optimize || reload_completed)
4810 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4811 }
4812 
4813 /* Merge block A and B.  The blocks must be mergeable.  */
4814 
4815 static void
4816 cfg_layout_merge_blocks (basic_block a, basic_block b)
4817 {
4818   /* If B is a forwarder block whose outgoing edge has no location, we'll
4819      propagate the locus of the edge between A and B onto it.  */
4820   const bool forward_edge_locus
4821     = (b->flags & BB_FORWARDER_BLOCK) != 0
4822       && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
4823   rtx_insn *insn;
4824 
4825   gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4826 
4827   if (dump_file)
4828     fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4829 			 a->index);
4830 
4831   /* If there was a CODE_LABEL beginning B, delete it.  */
4832   if (LABEL_P (BB_HEAD (b)))
4833     {
4834       delete_insn (BB_HEAD (b));
4835     }
4836 
4837   /* We should have a fallthru edge in A, or we can do a dummy redirection
4838      to get it cleaned up.  */
4839   if (JUMP_P (BB_END (a)))
4840     try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4841   gcc_assert (!JUMP_P (BB_END (a)));
4842 
4843   /* If not optimizing, preserve the locus of the single edge between
4844      blocks A and B if necessary by emitting a nop.  */
4845   if (!optimize
4846       && !forward_edge_locus
4847       && !DECL_IGNORED_P (current_function_decl))
4848     emit_nop_for_unique_locus_between (a, b);
4849 
4850   /* Move things from b->footer after a->footer.  */
4851   if (BB_FOOTER (b))
4852     {
4853       if (!BB_FOOTER (a))
4854 	BB_FOOTER (a) = BB_FOOTER (b);
4855       else
4856 	{
4857 	  rtx_insn *last = BB_FOOTER (a);
4858 
4859 	  while (NEXT_INSN (last))
4860 	    last = NEXT_INSN (last);
4861 	  SET_NEXT_INSN (last) = BB_FOOTER (b);
4862 	  SET_PREV_INSN (BB_FOOTER (b)) = last;
4863 	}
4864       BB_FOOTER (b) = NULL;
4865     }
4866 
4867   /* Move things from b->header before a->footer.
4868      Note that this may include dead tablejump data, but we don't clean
4869      those up until we go out of cfglayout mode.  */
4870    if (BB_HEADER (b))
4871      {
4872       if (! BB_FOOTER (a))
4873 	BB_FOOTER (a) = BB_HEADER (b);
4874       else
4875 	{
4876 	  rtx_insn *last = BB_HEADER (b);
4877 
4878 	  while (NEXT_INSN (last))
4879 	    last = NEXT_INSN (last);
4880 	  SET_NEXT_INSN (last) = BB_FOOTER (a);
4881 	  SET_PREV_INSN (BB_FOOTER (a)) = last;
4882 	  BB_FOOTER (a) = BB_HEADER (b);
4883 	}
4884       BB_HEADER (b) = NULL;
4885     }
4886 
4887   /* In case the basic blocks are not adjacent, move them around.  */
4888   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4889     {
4890       insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4891 
4892       emit_insn_after_noloc (insn, BB_END (a), a);
4893     }
4894   /* Otherwise just re-associate the instructions.  */
4895   else
4896     {
4897       insn = BB_HEAD (b);
4898       BB_END (a) = BB_END (b);
4899     }
4900 
4901   /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4902      We need to call it explicitly.  */
4903   update_bb_for_insn_chain (insn, BB_END (b), a);
4904 
4905   /* Skip possible DELETED_LABEL insn.  */
4906   if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4907     insn = NEXT_INSN (insn);
4908   gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4909   BB_HEAD (b) = BB_END (b) = NULL;
4910   delete_insn (insn);
4911 
4912   df_bb_delete (b->index);
4913 
4914   if (forward_edge_locus)
4915     EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4916 
4917   if (dump_file)
4918     fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4919 }
4920 
4921 /* Split edge E.  */
4922 
4923 static basic_block
4924 cfg_layout_split_edge (edge e)
4925 {
4926   basic_block new_bb =
4927     create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4928 			? NEXT_INSN (BB_END (e->src)) : get_insns (),
4929 			NULL_RTX, e->src);
4930 
4931   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4932     BB_COPY_PARTITION (new_bb, e->src);
4933   else
4934     BB_COPY_PARTITION (new_bb, e->dest);
4935   make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4936   redirect_edge_and_branch_force (e, new_bb);
4937 
4938   return new_bb;
4939 }
4940 
4941 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */
4942 
4943 static void
4944 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4945 {
4946 }
4947 
4948 /* Return true if BB contains only labels or non-executable
4949    instructions.  */
4950 
4951 static bool
4952 rtl_block_empty_p (basic_block bb)
4953 {
4954   rtx_insn *insn;
4955 
4956   if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4957       || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4958     return true;
4959 
4960   FOR_BB_INSNS (bb, insn)
4961     if (NONDEBUG_INSN_P (insn)
4962 	&& (!any_uncondjump_p (insn) || !onlyjump_p (insn)))
4963       return false;
4964 
4965   return true;
4966 }
4967 
4968 /* Split a basic block if it ends with a conditional branch and if
4969    the other part of the block is not empty.  */
4970 
4971 static basic_block
4972 rtl_split_block_before_cond_jump (basic_block bb)
4973 {
4974   rtx_insn *insn;
4975   rtx_insn *split_point = NULL;
4976   rtx_insn *last = NULL;
4977   bool found_code = false;
4978 
4979   FOR_BB_INSNS (bb, insn)
4980     {
4981       if (any_condjump_p (insn))
4982 	split_point = last;
4983       else if (NONDEBUG_INSN_P (insn))
4984 	found_code = true;
4985       last = insn;
4986     }
4987 
4988   /* Split only if we found both a conditional jump and other real code.  */
4989   if (found_code && split_point)
4990     return split_block (bb, split_point)->dest;
4991   else
4992     return NULL;
4993 }
4994 
4995 /* Return true if BB ends with a call, possibly followed by some
4996    instructions that must stay with the call, false otherwise.  */
4997 
4998 static bool
4999 rtl_block_ends_with_call_p (basic_block bb)
5000 {
5001   rtx_insn *insn = BB_END (bb);
5002 
5003   while (!CALL_P (insn)
5004 	 && insn != BB_HEAD (bb)
5005 	 && (keep_with_call_p (insn)
5006 	     || NOTE_P (insn)
5007 	     || DEBUG_INSN_P (insn)))
5008     insn = PREV_INSN (insn);
5009   return (CALL_P (insn));
5010 }
5011 
5012 /* Return true if BB ends with a conditional branch, false otherwise.  */
5013 
5014 static bool
5015 rtl_block_ends_with_condjump_p (const_basic_block bb)
5016 {
5017   return any_condjump_p (BB_END (bb));
5018 }
5019 
5020 /* Return true if we need to add a fake edge to the exit block.
5021    Helper function for rtl_flow_call_edges_add.  */
5022 
5023 static bool
5024 need_fake_edge_p (const rtx_insn *insn)
5025 {
5026   if (!INSN_P (insn))
5027     return false;
5028 
5029   if ((CALL_P (insn)
5030        && !SIBLING_CALL_P (insn)
5031        && !find_reg_note (insn, REG_NORETURN, NULL)
5032        && !(RTL_CONST_OR_PURE_CALL_P (insn))))
5033     return true;
5034 
5035   return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5036 	   && MEM_VOLATILE_P (PATTERN (insn)))
5037 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
5038 	      && asm_noperands (insn) != -1
5039 	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
5040 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
5041 }
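
/* E.g. a plain call insn that has no REG_NORETURN note and is not
   const/pure needs a fake edge, since control may leave the function
   behind the CFG's back (longjmp, exit, ...), and so does a volatile
   asm such as

     asm volatile ("" ::: "memory");

   whereas a sibling call or a const call does not.  */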
5042 
5043 /* Add fake edges to the function exit for any non-constant and non-noreturn
5044    calls or volatile inline assembly in the bitmap of blocks specified by
5045    BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
5046    that were split.
5047 
5048    The goal is to expose cases in which entering a basic block does not imply
5049    that all subsequent instructions must be executed.  */
5050 
5051 static int
5052 rtl_flow_call_edges_add (sbitmap blocks)
5053 {
5054   int i;
5055   int blocks_split = 0;
5056   int last_bb = last_basic_block_for_fn (cfun);
5057   bool check_last_block = false;
5058 
5059   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
5060     return 0;
5061 
5062   if (! blocks)
5063     check_last_block = true;
5064   else
5065     check_last_block = bitmap_bit_p (blocks,
5066 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
5067 
5068   /* In the last basic block, before epilogue generation, there will be
5069      a fallthru edge to EXIT.  Special care is required if the last insn
5070      of the last basic block is a call because make_edge folds duplicate
5071      edges, which would result in the fallthru edge also being marked
5072      fake, which would result in the fallthru edge being removed by
5073      remove_fake_edges, which would result in an invalid CFG.
5074 
5075      Moreover, we can't elide the outgoing fake edge, since the block
5076      profiler needs to take this into account in order to solve the minimal
5077      spanning tree in the case that the call doesn't return.
5078 
5079      Handle this by adding a dummy instruction in a new last basic block.  */
5080   if (check_last_block)
5081     {
5082       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5083       rtx_insn *insn = BB_END (bb);
5084 
5085       /* Back up past insns that must be kept in the same block as a call.  */
5086       while (insn != BB_HEAD (bb)
5087 	     && keep_with_call_p (insn))
5088 	insn = PREV_INSN (insn);
5089 
5090       if (need_fake_edge_p (insn))
5091 	{
5092 	  edge e;
5093 
5094 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
5095 	  if (e)
5096 	    {
5097 	      insert_insn_on_edge (gen_use (const0_rtx), e);
5098 	      commit_edge_insertions ();
5099 	    }
5100 	}
5101     }

  /* Now add fake edges to the function exit for any non-constant
     calls, since there is no way to determine whether they will
     return or not.  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in the CFG already.  Calling make_edge in such a case
		 would cause us to mark that edge as fake and remove it
		 later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	      ne->probability = profile_probability::guessed_never ();
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
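
/* The generic entry point for this hook is flow_call_edges_add in
   cfghooks.c; a pass wanting fake exit edges for the whole function
   would do something like (a sketch):

     rtl_register_cfg_hooks ();
     int n_split = flow_call_edges_add (NULL);

   where a NULL bitmap means "consider all basic blocks".  */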

/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target and SECOND_HEAD is the fall-thru block.
   There is no need to handle the fall-thru edge here; the loop
   versioning code handles it.  SECOND_HEAD is present because the
   tree-level version of this hook needs it, and both versions must
   have the same signature since this is a hook.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx_code_label *label;
  rtx_insn *seq, *jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  machine_mode mode;

  label = block_label (first_head);
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
			   profile_probability::uninitialized ());
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Emit the new comparison and jump at the end of COND_BB.  */
  emit_insn_after (seq, BB_END (cond_bb));
}
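
/* The emitted sequence looks roughly like this (an illustrative
   sketch; the exact form is target-dependent, here assuming a
   flags-register comparison):

     (insn (set (reg:CC flags) (compare:CC (reg op0) (reg op1))))
     (jump_insn (set (pc) (if_then_else (COMP (reg:CC flags) (const_int 0))
					(label_ref <FIRST_HEAD's label>)
					(pc))))

   i.e. a comparison followed by a conditional jump to FIRST_HEAD's
   label, with the fall-thru path continuing into SECOND_HEAD.  */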

/* Given a block B with a conditional branch at its end, store the
   branch edge in *BRANCH_EDGE and the fall-thru edge in
   *FALLTHRU_EDGE.  */
static void
rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
			   edge *fallthru_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_FALLTHRU)
    {
      *fallthru_edge = e;
      *branch_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *branch_edge = e;
      *fallthru_edge = EDGE_SUCC (b, 1);
    }
}
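
/* A minimal usage sketch (the generic extract_cond_bb_edges wrapper in
   cfghooks.c dispatches here when the CFG is in RTL mode):

     edge branch, fallthru;
     rtl_extract_cond_bb_edges (bb, &branch, &fallthru);

   B is assumed to have exactly two successors, exactly one of which
   has EDGE_FALLTHRU set.  */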

/* Initialize the RTL-specific part of basic block BB's info.  */

void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
}

/* Return true if BB's RTL-specific info has been initialized.  */

static bool
rtl_bb_info_initialized_p (basic_block bb)
{
  return bb->il.x.rtl;
}

/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
rtl_can_remove_branch_p (const_edge e)
{
  const_basic_block src = e->src;
  const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
  const rtx_insn *insn = BB_END (src);
  rtx set;

  /* The conditions are taken from try_redirect_by_replacing_jump.  */
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return false;

  if (!onlyjump_p (insn)
      || tablejump_p (insn, NULL, NULL))
    return false;

  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  return true;
}
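
/* This predicate backs the generic can_remove_branch_p hook; a caller
   would typically pair it with remove_branch from cfghooks.c, e.g.
   (a sketch):

     if (can_remove_branch_p (e))
       remove_branch (e);

   which removes E and redirects E->src to its remaining successor.  */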

/* Hook version of duplicate_block for plain RTL mode: reuse the
   cfglayout implementation and clear the AUX field afterwards.  */

static basic_block
rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
{
  bb = cfg_layout_duplicate_bb (bb, id);
  bb->aux = NULL;
  return bb;
}

/* Do book-keeping of basic block BB for the profile consistency checker.
   Accumulate the size and time counts in RECORD.  */
static void
rtl_account_profile_record (basic_block bb, struct profile_record *record)
{
  rtx_insn *insn;
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	record->size += insn_cost (insn, false);
	if (bb->count.initialized_p ())
	  record->time
	    += insn_cost (insn, true) * bb->count.to_gcov_type ();
	else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
	  record->time
	    += insn_cost (insn, true) * bb->count.to_frequency (cfun);
      }
}

/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  rtl_duplicate_bb,
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};
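
/* These hooks are reached through the generic wrappers in cfghooks.c
   once a pass has selected a representation, e.g. (a sketch):

     rtl_register_cfg_hooks ();        [sets cfg_hooks = &rtl_cfg_hooks]
     edge e = split_block (bb, insn);  [dispatches to rtl_split_block]

   cfglayout mode instead installs cfg_layout_rtl_cfg_hooks, defined
   below.  */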

/* Implementation of CFG manipulation for cfglayout RTL, where basic
   blocks connected via fallthru edges do not have to be adjacent.
   This representation will hopefully become the default one in a
   future version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};

#include "gt-cfgrtl.h"

#if __GNUC__ >= 10
#  pragma GCC diagnostic pop
#endif