/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low-level functions that manipulate and analyze the
   CFG and that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not intended for generic use:
     - Infrastructure to quickly determine the basic block for an insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
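
/* A minimal usage sketch of the edge-insertion API listed above
   (illustrative only; E and TARGET_REG are hypothetical).  Insns queued
   on an edge with insert_insn_on_edge are spliced into the insn stream
   only once commit_edge_insertions is called, which may split edges and
   create new basic blocks as needed:

     start_sequence ();
     emit_move_insn (target_reg, const0_rtx);
     rtx_insn *seq = get_insns ();
     end_sequence ();
     insert_insn_on_edge (seq, e);
     ...
     commit_edge_insertions ();
*/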

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "bb-reorder.h"
#include "rtl-error.h"
#include "insn-attr.h"
#include "dojump.h"
#include "expr.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "print-rtl.h"
#include "rtl-iter.h"
#include "gimplify.h"
#include "profile.h"
#include "sreal.h"

/* Disable warnings about missing quoting in GCC diagnostics.  */
#if __GNUC__ >= 10
#  pragma GCC diagnostic push
#  pragma GCC diagnostic ignored "-Wformat-diag"
#endif

/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx_insn *cfg_layout_function_footer;
static GTY(()) rtx_insn *cfg_layout_function_header;

static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static int can_delete_note_p (const rtx_note *);
static int can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);
static bool rtl_bb_info_initialized_p (basic_block bb);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const rtx_note *note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !vec_safe_contains<rtx_insn *> (forced_labels,
					     const_cast<rtx_code_label *> (label)));
}

/* Delete INSN by patching it out.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be referenced via variables, the constant pool, etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}

/* Like delete_insn but also purge dead edges from BB.
   Return true if any edges are eliminated.  */

bool
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

  if (NONDEBUG_INSN_P (insn) && BLOCK_FOR_INSN (insn))
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      if (BB_END (bb) == insn)
	purge = true;
      else if (DEBUG_INSN_P (BB_END (bb)))
	for (rtx_insn *dinsn = NEXT_INSN (insn);
	     DEBUG_INSN_P (dinsn); dinsn = NEXT_INSN (dinsn))
	  if (BB_END (bb) == dinsn)
	    {
	      purge = true;
	      break;
	    }
    }
  delete_insn (insn);
  if (purge)
    return purge_dead_edges (BLOCK_FOR_INSN (insn));
  return false;
}
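
/* Usage sketch (illustrative only; BB is hypothetical): deleting the insn
   that ends a block is the typical case in which successor edges may die:

     if (delete_insn_and_edges (BB_END (bb)))
       cleanup_cfg (0);

   The cleanup_cfg call merely illustrates reacting to the CFG change.  */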

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be kept paired.  If CLEAR_BB is true, set the bb field
   to NULL for insns that cannot be removed.  */

void
delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
{
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
  rtx_insn *current = finish;
  while (1)
    {
      rtx_insn *prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
	;
      else
	delete_insn (current);

      if (clear_bb && !current->deleted ())
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}

/* Create a new basic block consisting of the instructions between HEAD and
   END inclusive.  This function is designed to allow fast BB construction:
   it reuses the note and basic block struct in BB_NOTE, if any, does not
   grow the BASIC_BLOCK chain, and should be used directly only by CFG
   construction code.  END can be NULL to create a new empty basic block
   before HEAD.  Both END and HEAD can be NULL to create a basic block at
   the end of the INSN chain.  AFTER is the basic block the new one should
   be placed after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

/* Create a new basic block consisting of the instructions between HEAD and
   END and place it in the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   last_basic_block_for_fn (cfun) + 1);

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);

  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.cc machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}

/* Return the RTX to emit after when we want to emit code at the entry
   of the function.  */
rtx_insn *
entry_of_function (void)
{
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
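
/* Usage sketch (illustrative only; the SET built here and SOME_REGNO are
   hypothetical):

     emit_insn_at_entry (gen_rtx_SET (gen_rtx_REG (Pmode, SOME_REGNO),
				      const0_rtx));

   Since the insn is inserted on the entry fallthru edge and committed
   immediately, it executes exactly once per invocation of the function.  */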

/* Update BLOCK_FOR_INSN of insns between BEGIN and END (BARRIERs are
   skipped) and notify df of the bb change.  The insn chain range is
   inclusive (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}

/* Like active_insn_p, except keep the return value use or clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.
     Similarly, keep a USE of the function return value, otherwise
     the USE is dropped and we could fail to thread jump if USE
     appears on some paths and not on others, see PR90257.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || GET_CODE (PATTERN (insn)) == USE)
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
	return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

  return true;
}

/* Return true if we can reach TARGET from SRC by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}

/* Return true if we could reach TARGET from SRC by falling through,
   if the target was made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}

/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Creates a new basic block just after basic block BB by splitting
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check the type of the insn so that we do not create
     an inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}

/* Return true if LOC1 and LOC2 are equivalent for
   unique_locus_on_edge_between_p purposes.  */

static bool
loc_equal (location_t loc1, location_t loc2)
{
  if (loc1 == loc2)
    return true;

  expanded_location loce1 = expand_location (loc1);
  expanded_location loce2 = expand_location (loc2);

  if (loce1.line != loce2.line
      || loce1.column != loce2.column
      || loce1.data != loce2.data)
    return false;
  if (loce1.file == loce2.file)
    return true;
  return (loce1.file != NULL
	  && loce2.file != NULL
	  && filename_cmp (loce1.file, loce2.file) == 0);
}

/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && loc_equal (INSN_LOCATION (insn), goto_locus))
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && loc_equal (INSN_LOCATION (insn), goto_locus))
	return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}

/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */

rtx_code_label *
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  if (!LABEL_P (BB_HEAD (block)))
    {
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
    }

  return as_a <rtx_code_label *> (BB_HEAD (block));
}

/* Remove all barriers from BB_FOOTER of a BB.  */

static void
remove_barriers_from_footer (basic_block bb)
{
  rtx_insn *insn = BB_FOOTER (bb);

  /* Remove barriers but keep jumptables.  */
  while (insn)
    {
      if (BARRIER_P (insn))
	{
	  if (PREV_INSN (insn))
	    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	  else
	    BB_FOOTER (bb) = NEXT_INSN (insn);
	  if (NEXT_INSN (insn))
	    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	}
      if (LABEL_P (insn))
	return;
      insn = NEXT_INSN (insn);
    }
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block.  The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing a branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  delete_insn_chain (insn, BB_END (src), false);
	  remove_barriers_from_footer (src);
	}
      else
	delete_insn_chain (insn, PREV_INSN (BB_HEAD (target)), false);
    }

  /* If this already is a simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything if the target is the exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace a possibly complicated jump insn by a simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (insn, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before the barrier so that the notes
		 which originally were or were created before the jump
		 table are inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
					PREV_INSN (barrier), src);

	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this
   only when it originally reached OLD_LABEL.  Return true if this
   worked or if the original target wasn't OLD_LABEL; return false
   if redirection is not possible.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = tablejump_casesi_pattern (insn)) != NULL_RTX
	  && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ??? We may play games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     the target is the exit block on some arches.  Or for crossing
	     jumps.  */
	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			      block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
			  || CROSSING_JUMP_P (insn));
	      return false;
	    }
	}
    }
  return true;
}

/* Redirect the edge representing a branch of a (un)conditional jump or
   tablejump; return NULL on failure.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of a jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
					     old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}

/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure the reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from the jump at the end of
	 src if it exists, and if no other successors are
	 still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
	{
	  bool has_crossing_succ = false;
	  edge e2;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e2, ei, e->src->succs)
	    {
	      has_crossing_succ |= (e2->flags & EDGE_CROSSING);
	      if (has_crossing_succ)
		break;
	    }
	  if (!has_crossing_succ)
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
	}
    }
}

/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
	 Note that force_nonfallthru will do any necessary partition
	 boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
	  && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	force_nonfallthru (e);
      else
	fixup_partition_crossing (e);
    }
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do that at
   the expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination equivalent to
   TARGET.  Then it should try the simplifications and do nothing if none
   are possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.
   We still return NULL if E already pointed to TARGET and we did not
   manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}

/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
	      || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
	  rtx_insn *footer_tail = BB_FOOTER (bb);

	  while (NEXT_INSN (footer_tail))
	    footer_tail = NEXT_INSN (footer_tail);
	  if (!BARRIER_P (footer_tail))
	    {
	      SET_NEXT_INSN (footer_tail) = insn;
	      SET_PREV_INSN (insn) = footer_tail;
	    }
	}
      else
	BB_FOOTER (bb) = insn;
    }
}
1501 
1502 /* Like force_nonfallthru below, but additionally performs redirection
1503    Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
1504    when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1505    simple_return_rtx, indicating which kind of returnjump to create.
1506    It should be NULL otherwise.  */
1507 
1508 basic_block
force_nonfallthru_and_redirect(edge e,basic_block target,rtx jump_label)1509 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1510 {
1511   basic_block jump_block, new_bb = NULL, src = e->src;
1512   rtx note;
1513   edge new_edge;
1514   int abnormal_edge_flags = 0;
1515   bool asm_goto_edge = false;
1516   int loc;
1517 
1518   /* In the case the last instruction is conditional jump to the next
1519      instruction, first redirect the jump itself and then continue
1520      by creating a basic block afterwards to redirect fallthru edge.  */
1521   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1522       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1523       && any_condjump_p (BB_END (e->src))
1524       && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1525     {
1526       rtx note;
1527       edge b = unchecked_make_edge (e->src, target, 0);
1528       bool redirected;
1529 
1530       redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
1531 				  block_label (target), 0);
1532       gcc_assert (redirected);
1533 
1534       note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1535       if (note)
1536 	{
1537 	  int prob = XINT (note, 0);
1538 
1539 	  b->probability = profile_probability::from_reg_br_prob_note (prob);
1540 	  e->probability -= e->probability;
1541 	}
1542     }
1543 
1544   if (e->flags & EDGE_ABNORMAL)
1545     {
1546       /* Irritating special case - fallthru edge to the same block as abnormal
1547 	 edge.
1548 	 We can't redirect abnormal edge, but we still can split the fallthru
1549 	 one and create separate abnormal edge to original destination.
1550 	 This allows bb-reorder to make such edge non-fallthru.  */
1551       gcc_assert (e->dest == target);
1552       abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1553       e->flags &= EDGE_FALLTHRU;
1554     }
1555   else
1556     {
1557       gcc_assert (e->flags & EDGE_FALLTHRU);
1558       if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1559 	{
1560 	  /* We can't redirect the entry block.  Create an empty block
1561 	     at the start of the function which we use to add the new
1562 	     jump.  */
1563 	  edge tmp;
1564 	  edge_iterator ei;
1565 	  bool found = false;
1566 
1567 	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
1568 					       ENTRY_BLOCK_PTR_FOR_FN (cfun));
1569 	  bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
1570 
1571 	  /* Make sure new block ends up in correct hot/cold section.  */
1572 	  BB_COPY_PARTITION (bb, e->dest);
1573 
1574 	  /* Change the existing edge's source to be the new block, and add
1575 	     a new edge from the entry block to the new block.  */
1576 	  e->src = bb;
1577 	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1578 	       (tmp = ei_safe_edge (ei)); )
1579 	    {
1580 	      if (tmp == e)
1581 		{
1582 		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
1583 		  found = true;
1584 		  break;
1585 		}
1586 	      else
1587 		ei_next (&ei);
1588 	    }
1589 
1590 	  gcc_assert (found);
1591 
1592 	  vec_safe_push (bb->succs, e);
1593 	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
1594 				 EDGE_FALLTHRU);
1595 	}
1596     }
1597 
1598   /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1599      don't point to the target or fallthru label.  */
1600   if (JUMP_P (BB_END (e->src))
1601       && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
1602       && (e->flags & EDGE_FALLTHRU)
1603       && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1604     {
1605       int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1606       bool adjust_jump_target = false;
1607 
1608       for (i = 0; i < n; ++i)
1609 	{
1610 	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1611 	    {
1612 	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1613 	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1614 	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1615 	      adjust_jump_target = true;
1616 	    }
1617 	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1618 	    asm_goto_edge = true;
1619 	}
1620       if (adjust_jump_target)
1621 	{
1622 	  rtx_insn *insn = BB_END (e->src);
1623 	  rtx note;
1624 	  rtx_insn *old_label = BB_HEAD (e->dest);
1625 	  rtx_insn *new_label = BB_HEAD (target);
1626 
1627 	  if (JUMP_LABEL (insn) == old_label)
1628 	    {
1629 	      JUMP_LABEL (insn) = new_label;
1630 	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1631 	      if (note)
1632 		remove_note (insn, note);
1633 	    }
1634 	  else
1635 	    {
1636 	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1637 	      if (note)
1638 		remove_note (insn, note);
1639 	      if (JUMP_LABEL (insn) != new_label
1640 		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1641 		add_reg_note (insn, REG_LABEL_TARGET, new_label);
1642 	    }
1643 	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1644 		 != NULL_RTX)
1645 	    XEXP (note, 0) = new_label;
1646 	}
1647     }
1648 
1649   if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1650     {
1651       rtx_insn *new_head;
1652       profile_count count = e->count ();
1653       profile_probability probability = e->probability;
1654       /* Create the new structures.  */
1655 
1656       /* If the old block ended with a tablejump, skip its table
1657 	 by searching forward from there.  Otherwise start searching
1658 	 forward from the last instruction of the old block.  */
1659       rtx_jump_table_data *table;
1660       if (tablejump_p (BB_END (e->src), NULL, &table))
1661 	new_head = table;
1662       else
1663 	new_head = BB_END (e->src);
1664       new_head = NEXT_INSN (new_head);
1665 
1666       jump_block = create_basic_block (new_head, NULL, e->src);
1667       jump_block->count = count;
1668 
1669       /* Make sure new block ends up in correct hot/cold section.  */
1670 
1671       BB_COPY_PARTITION (jump_block, e->src);
1672 
1673       /* Wire edge in.  */
1674       new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1675       new_edge->probability = probability;
1676 
1677       /* Redirect old edge.  */
1678       redirect_edge_pred (e, jump_block);
1679       e->probability = profile_probability::always ();
1680 
1681       /* If e->src was previously region crossing, it no longer is
1682          and the region crossing note should be removed.  */
1683       fixup_partition_crossing (new_edge);
1684 
1685       /* If the asm goto has any label refs to target's label,
1686 	 also add an edge from the asm goto bb to target.  */
1687       if (asm_goto_edge)
1688 	{
1689 	  new_edge->probability = new_edge->probability.apply_scale (1, 2);
1690 	  jump_block->count = jump_block->count.apply_scale (1, 2);
1691 	  edge new_edge2 = make_edge (new_edge->src, target,
1692 				      e->flags & ~EDGE_FALLTHRU);
1693 	  new_edge2->probability = probability - new_edge->probability;
1694 	}
1695 
1696       new_bb = jump_block;
1697     }
1698   else
1699     jump_block = e->src;
1700 
1701   loc = e->goto_locus;
1702   e->flags &= ~EDGE_FALLTHRU;
1703   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1704     {
1705       if (jump_label == ret_rtx)
1706 	emit_jump_insn_after_setloc (targetm.gen_return (),
1707 				     BB_END (jump_block), loc);
1708       else
1709 	{
1710 	  gcc_assert (jump_label == simple_return_rtx);
1711 	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
1712 				       BB_END (jump_block), loc);
1713 	}
1714       set_return_jump_label (BB_END (jump_block));
1715     }
1716   else
1717     {
1718       rtx_code_label *label = block_label (target);
1719       emit_jump_insn_after_setloc (targetm.gen_jump (label),
1720 				   BB_END (jump_block), loc);
1721       JUMP_LABEL (BB_END (jump_block)) = label;
1722       LABEL_NUSES (label)++;
1723     }
1724 
1725   /* We might be in cfg layout mode, and if so, the following routine will
1726      insert the barrier correctly.  */
1727   emit_barrier_after_bb (jump_block);
1728   redirect_edge_succ_nodup (e, target);
1729 
1730   if (abnormal_edge_flags)
1731     make_edge (src, target, abnormal_edge_flags);
1732 
1733   df_mark_solutions_dirty ();
1734   fixup_partition_crossing (e);
1735   return new_bb;
1736 }
1737 
1738 /* Edge E is assumed to be a fallthru edge.  Emit the jump instruction
1739    needed (and possibly create a new basic block) to make the edge
1740    non-fallthru.  Return the newly created BB or NULL if none.  */
1741 
1742 static basic_block
1743 rtl_force_nonfallthru (edge e)
1744 {
1745   return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1746 }
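/* Usage sketch (hypothetical example_* helper, compiled out): force an
   incoming fallthru edge of BB to use an explicit jump, e.g. before
   moving BB around in the insn chain.  Only illustrates the calling
   pattern of the force_nonfallthru cfghooks wrapper used above.  */
#if 0
static void
example_force_explicit_jump (basic_block bb)
{
  edge e = find_fallthru_edge (bb->preds);
  if (e)
    /* May create and return a new forwarder block holding the jump.  */
    force_nonfallthru (e);
}
#endif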
1747 
1748 /* Redirect the edge even at the expense of creating a new jump insn or
1749    basic block.  Return the new basic block if created, NULL otherwise.
1750    The conversion must be possible.  */
1751 
1752 static basic_block
1753 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1754 {
1755   if (redirect_edge_and_branch (e, target)
1756       || e->dest == target)
1757     return NULL;
1758 
1759   /* In case the edge redirection failed, try to force it to be non-fallthru
1760      and redirect newly created simplejump.  */
1761   df_set_bb_dirty (e->src);
1762   return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1763 }
1764 
1765 /* The given edge should potentially be a fallthru edge.  If that is in
1766    fact true, delete the jump and barriers that are in the way.  */
1767 
1768 static void
1769 rtl_tidy_fallthru_edge (edge e)
1770 {
1771   rtx_insn *q;
1772   basic_block b = e->src, c = b->next_bb;
1773 
1774   /* ??? In a late-running flow pass, other folks may have deleted basic
1775      blocks by nopping out blocks, leaving multiple BARRIERs between here
1776      and the target label. They ought to be chastised and fixed.
1777 
1778      We can also wind up with a sequence of undeletable labels between
1779      one block and the next.
1780 
1781      So search through a sequence of barriers, labels, and notes for
1782      the head of block C and assert that we really do fall through.  */
1783 
1784   for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1785     if (NONDEBUG_INSN_P (q))
1786       return;
1787 
1788   /* Remove what will soon cease being the jump insn from the source block.
1789      If block B consisted only of this single jump, turn it into a deleted
1790      note.  */
1791   q = BB_END (b);
1792   if (JUMP_P (q)
1793       && onlyjump_p (q)
1794       && (any_uncondjump_p (q)
1795 	  || single_succ_p (b)))
1796     {
1797       rtx_insn *label;
1798       rtx_jump_table_data *table;
1799 
1800       if (tablejump_p (q, &label, &table))
1801 	{
1802 	  /* The label is likely mentioned in some instruction before
1803 	     the tablejump and might not be DCEd, so turn it into
1804 	     a note instead and move it before the tablejump that is going to
1805 	     be deleted.  */
1806 	  const char *name = LABEL_NAME (label);
1807 	  PUT_CODE (label, NOTE);
1808 	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
1809 	  NOTE_DELETED_LABEL_NAME (label) = name;
1810 	  reorder_insns (label, label, PREV_INSN (q));
1811 	  delete_insn (table);
1812 	}
1813 
1814       q = PREV_INSN (q);
1815     }
1816   /* Unconditional jumps with side-effects (i.e. which we can't just delete
1817      together with the barrier) should never have a fallthru edge.  */
1818   else if (JUMP_P (q) && any_uncondjump_p (q))
1819     return;
1820 
1821   /* Selectively unlink the sequence.  */
1822   if (q != PREV_INSN (BB_HEAD (c)))
1823     delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1824 
1825   e->flags |= EDGE_FALLTHRU;
1826 }
1827 
1828 /* Should move basic block BB after basic block AFTER.  Not implemented yet.  */
1829 
1830 static bool
1831 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1832 		      basic_block after ATTRIBUTE_UNUSED)
1833 {
1834   return false;
1835 }
1836 
1837 /* Locate the last bb in the same partition as START_BB.  */
1838 
1839 static basic_block
1840 last_bb_in_partition (basic_block start_bb)
1841 {
1842   basic_block bb;
1843   FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1844     {
1845       if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1846         return bb;
1847     }
1848   /* Return bb before the exit block.  */
1849   return bb->prev_bb;
1850 }
1851 
1852 /* Split a (typically critical) edge.  Return the new block.
1853    The edge must not be abnormal.
1854 
1855    ??? The code generally expects to be called on critical edges.
1856    The case of a block ending in an unconditional jump to a
1857    block with multiple predecessors is not handled optimally.  */
1858 
1859 static basic_block
1860 rtl_split_edge (edge edge_in)
1861 {
1862   basic_block bb, new_bb;
1863   rtx_insn *before;
1864 
1865   /* Abnormal edges cannot be split.  */
1866   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1867 
1868   /* We are going to place the new block in front of the edge's destination.
1869      Make sure the destination has no fallthru predecessors.  */
1870   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1871     {
1872       edge e = find_fallthru_edge (edge_in->dest->preds);
1873 
1874       if (e)
1875 	force_nonfallthru (e);
1876     }
1877 
1878   /* Create the basic block note.  */
1879   if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1880     before = BB_HEAD (edge_in->dest);
1881   else
1882     before = NULL;
1883 
1884   /* If this is a fall through edge to the exit block, the blocks might
1885      not be adjacent, and the right place is after the source.  */
1886   if ((edge_in->flags & EDGE_FALLTHRU)
1887       && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1888     {
1889       before = NEXT_INSN (BB_END (edge_in->src));
1890       bb = create_basic_block (before, NULL, edge_in->src);
1891       BB_COPY_PARTITION (bb, edge_in->src);
1892     }
1893   else
1894     {
1895       if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1896         {
1897           bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1898           BB_COPY_PARTITION (bb, edge_in->dest);
1899         }
1900       else
1901         {
1902           basic_block after = edge_in->dest->prev_bb;
1903           /* If this is post-bb reordering, and the edge crosses a partition
1904              boundary, the new block needs to be inserted in the bb chain
1905              at the end of the src partition (since we put the new bb into
1906              that partition, see below). Otherwise we may end up creating
1907              an extra partition crossing in the chain, which is illegal.
1908              It can't go after the src, because src may have a fall-through
1909              to a different block.  */
1910           if (crtl->bb_reorder_complete
1911               && (edge_in->flags & EDGE_CROSSING))
1912             {
1913               after = last_bb_in_partition (edge_in->src);
1914               before = get_last_bb_insn (after);
1915               /* The instruction following the last bb in the partition should
1916                  be a barrier, since it cannot end in a fall-through.  */
1917               gcc_checking_assert (BARRIER_P (before));
1918               before = NEXT_INSN (before);
1919             }
1920           bb = create_basic_block (before, NULL, after);
1921           /* Put the split bb into the src partition, to avoid creating
1922              a situation where a cold bb dominates a hot bb, in the case
1923              where src is cold and dest is hot. The src will dominate
1924              the new bb (whereas it might not have dominated dest).  */
1925           BB_COPY_PARTITION (bb, edge_in->src);
1926         }
1927     }
1928 
1929   make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1930 
1931   /* Can't allow a region crossing edge to be fallthrough.  */
1932   if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1933       && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1934     {
1935       new_bb = force_nonfallthru (single_succ_edge (bb));
1936       gcc_assert (!new_bb);
1937     }
1938 
1939   /* For non-fallthru edges, we must adjust the predecessor's
1940      jump instruction to target our new block.  */
1941   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1942     {
1943       edge redirected = redirect_edge_and_branch (edge_in, bb);
1944       gcc_assert (redirected);
1945     }
1946   else
1947     {
1948       if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1949 	{
1950 	  /* For an asm goto, even splitting the fallthru edge might
1951 	     need insn patching, as other labels might point to the
1952 	     old label.  */
1953 	  rtx_insn *last = BB_END (edge_in->src);
1954 	  if (last
1955 	      && JUMP_P (last)
1956 	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1957 	      && (extract_asm_operands (PATTERN (last))
1958 		  || JUMP_LABEL (last) == before)
1959 	      && patch_jump_insn (last, before, bb))
1960 	    df_set_bb_dirty (edge_in->src);
1961 	}
1962       redirect_edge_succ (edge_in, bb);
1963     }
1964 
1965   return bb;
1966 }
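/* Usage sketch (hypothetical helper, compiled out): split a critical,
   non-abnormal edge through the generic split_edge cfghooks wrapper,
   which dispatches to rtl_split_edge above when in RTL mode.  */
#if 0
static basic_block
example_split_critical_edge (edge e)
{
  gcc_assert (!(e->flags & EDGE_ABNORMAL));
  /* The new block falls through to the old destination and inherits
     the source's partition, as implemented above.  */
  return split_edge (e);
}
#endif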
1967 
1968 /* Queue instructions for insertion on an edge between two basic blocks.
1969    The new instructions and basic blocks (if any) will not appear in the
1970    CFG until commit_edge_insertions is called.  */
1971 
1972 void
1973 insert_insn_on_edge (rtx pattern, edge e)
1974 {
1975   /* We cannot insert instructions on an abnormal critical edge.
1976      It will be easier to find the culprit if we die now.  */
1977   gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1978 
1979   if (e->insns.r == NULL_RTX)
1980     start_sequence ();
1981   else
1982     push_to_sequence (e->insns.r);
1983 
1984   emit_insn (pattern);
1985 
1986   e->insns.r = get_insns ();
1987   end_sequence ();
1988 }
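/* Usage sketch (hypothetical helper, compiled out): queue a DEST := SRC
   copy on edge E.  Nothing appears in the insn stream or the CFG until
   commit_edge_insertions is called.  */
#if 0
static void
example_queue_copy_on_edge (edge e, rtx dest, rtx src)
{
  start_sequence ();
  emit_move_insn (dest, src);
  rtx_insn *seq = get_insns ();
  end_sequence ();
  insert_insn_on_edge (seq, e);
}
#endif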
1989 
1990 /* Update the CFG for the instructions queued on edge E.  */
1991 
1992 void
1993 commit_one_edge_insertion (edge e)
1994 {
1995   rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
1996   basic_block bb;
1997 
1998   /* Pull the insns off the edge now since the edge might go away.  */
1999   insns = e->insns.r;
2000   e->insns.r = NULL;
2001 
2002   /* Figure out where to put these insns.  If the destination has
2003      one predecessor, insert there.  Except for the exit block.  */
2004   if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2005     {
2006       bb = e->dest;
2007 
2008       /* Get the location correct wrt a code label, and "nice" wrt
2009 	 a basic block note, and before everything else.  */
2010       tmp = BB_HEAD (bb);
2011       if (LABEL_P (tmp))
2012 	tmp = NEXT_INSN (tmp);
2013       if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2014 	tmp = NEXT_INSN (tmp);
2015       if (tmp == BB_HEAD (bb))
2016 	before = tmp;
2017       else if (tmp)
2018 	after = PREV_INSN (tmp);
2019       else
2020 	after = get_last_insn ();
2021     }
2022 
2023   /* If the source has one successor and the edge is not abnormal,
2024      insert there.  Except for the entry block.
2025      Don't do this if the predecessor ends in a jump other than an
2026      unconditional simple jump.  E.g. for an asm goto that points all
2027      its labels at the fallthru basic block, we can't insert instructions
2028      before the asm goto, as the asm goto can have various side effects,
2029      and can't emit instructions after the asm goto, as it must end
2030      the basic block.  */
2031   else if ((e->flags & EDGE_ABNORMAL) == 0
2032 	   && single_succ_p (e->src)
2033 	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2034 	   && (!JUMP_P (BB_END (e->src))
2035 	       || simplejump_p (BB_END (e->src))))
2036     {
2037       bb = e->src;
2038 
2039       /* It is possible to have a non-simple jump here.  Consider a target
2040 	 where some forms of unconditional jumps clobber a register.  This
2041 	 happens on the fr30 for example.
2042 
2043 	 We know this block has a single successor, so we can just emit
2044 	 the queued insns before the jump.  */
2045       if (JUMP_P (BB_END (bb)))
2046 	before = BB_END (bb);
2047       else
2048 	{
2049 	  /* We'd better be fallthru, or we've lost track of what's what.  */
2050 	  gcc_assert (e->flags & EDGE_FALLTHRU);
2051 
2052 	  after = BB_END (bb);
2053 	}
2054     }
2055 
2056   /* Otherwise we must split the edge.  */
2057   else
2058     {
2059       bb = split_edge (e);
2060 
2061       /* If E crossed a partition boundary, we needed to make bb end in
2062          a region-crossing jump, even though it was originally fallthru.  */
2063       if (JUMP_P (BB_END (bb)))
2064 	before = BB_END (bb);
2065       else
2066         after = BB_END (bb);
2067     }
2068 
2069   /* Now that we've found the spot, do the insertion.  */
2070   if (before)
2071     {
2072       emit_insn_before_noloc (insns, before, bb);
2073       last = prev_nonnote_insn (before);
2074     }
2075   else
2076     last = emit_insn_after_noloc (insns, after, bb);
2077 
2078   if (returnjump_p (last))
2079     {
2080       /* ??? Remove all outgoing edges from BB and add one for EXIT.
2081 	 This is not currently a problem because this only happens
2082 	 for the (single) epilogue, which already has a fallthru edge
2083 	 to EXIT.  */
2084 
2085       e = single_succ_edge (bb);
2086       gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2087 		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2088 
2089       e->flags &= ~EDGE_FALLTHRU;
2090       emit_barrier_after (last);
2091 
2092       if (before)
2093 	delete_insn (before);
2094     }
2095   else
2096     gcc_assert (!JUMP_P (last));
2097 }
2098 
2099 /* Update the CFG for all queued instructions.  */
2100 
2101 void
2102 commit_edge_insertions (void)
2103 {
2104   basic_block bb;
2105 
2106   /* Optimization passes that invoke this routine can cause hot blocks
2107      previously reached by both hot and cold blocks to become dominated only
2108      by cold blocks. This will cause the verification below to fail,
2109      and lead to now cold code in the hot section. In some cases this
2110      may only be visible after newly unreachable blocks are deleted,
2111      which will be done by fixup_partitions.  */
2112   fixup_partitions ();
2113 
2114   if (!currently_expanding_to_rtl)
2115     checking_verify_flow_info ();
2116 
2117   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2118 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2119     {
2120       edge e;
2121       edge_iterator ei;
2122 
2123       FOR_EACH_EDGE (e, ei, bb->succs)
2124 	if (e->insns.r)
2125 	  {
2126 	    if (currently_expanding_to_rtl)
2127 	      rebuild_jump_labels_chain (e->insns.r);
2128 	    commit_one_edge_insertion (e);
2129 	  }
2130     }
2131 }
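/* Usage sketch (hypothetical helper, compiled out): the typical
   pass-level pattern is to queue insns on any number of edges and then
   commit once, letting commit_one_edge_insertion pick or create the
   right block for each edge.  */
#if 0
static void
example_flush_pending_edge_insns (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;
  bool pending = false;

  FOR_EACH_BB_FN (bb, cfun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      pending |= (e->insns.r != NULL);

  if (pending)
    commit_edge_insertions ();
}
#endif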
2132 
2133 
2134 /* Print out RTL-specific basic block information (live information
2135    at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
2136    documented in dumpfile.h.  */
2137 
2138 static void
2139 rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
2140 {
2141   char *s_indent;
2142 
2143   s_indent = (char *) alloca ((size_t) indent + 1);
2144   memset (s_indent, ' ', (size_t) indent);
2145   s_indent[indent] = '\0';
2146 
2147   if (df && (flags & TDF_DETAILS))
2148     {
2149       df_dump_top (bb, outf);
2150       putc ('\n', outf);
2151     }
2152 
2153   if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK
2154       && rtl_bb_info_initialized_p (bb))
2155     {
2156       rtx_insn *last = BB_END (bb);
2157       if (last)
2158 	last = NEXT_INSN (last);
2159       for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
2160 	{
2161 	  if (flags & TDF_DETAILS)
2162 	    df_dump_insn_top (insn, outf);
2163 	  if (! (flags & TDF_SLIM))
2164 	    print_rtl_single (outf, insn);
2165 	  else
2166 	    dump_insn_slim (outf, insn);
2167 	  if (flags & TDF_DETAILS)
2168 	    df_dump_insn_bottom (insn, outf);
2169 	}
2170     }
2171 
2172   if (df && (flags & TDF_DETAILS))
2173     {
2174       df_dump_bottom (bb, outf);
2175       putc ('\n', outf);
2176     }
2177 
2178 }
2179 
2180 /* Like dump_function_to_file, but for RTL.  Print out dataflow information
2181    for the start of each basic block.  FLAGS are the TDF_* masks documented
2182    in dumpfile.h.  */
2183 
2184 void
2185 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
2186 {
2187   const rtx_insn *tmp_rtx;
2188   if (rtx_first == 0)
2189     fprintf (outf, "(nil)\n");
2190   else
2191     {
2192       enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2193       int max_uid = get_max_uid ();
2194       basic_block *start = XCNEWVEC (basic_block, max_uid);
2195       basic_block *end = XCNEWVEC (basic_block, max_uid);
2196       enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2197       basic_block bb;
2198 
2199       /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2200 	 insns, but the CFG is not maintained so the basic block info
2201 	 is not reliable.  Therefore it's omitted from the dumps.  */
2202       if (! (cfun->curr_properties & PROP_cfg))
2203         flags &= ~TDF_BLOCKS;
2204 
2205       if (df)
2206 	df_dump_start (outf);
2207 
2208       if (cfun->curr_properties & PROP_cfg)
2209 	{
2210 	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
2211 	    {
2212 	      rtx_insn *x;
2213 
2214 	      start[INSN_UID (BB_HEAD (bb))] = bb;
2215 	      end[INSN_UID (BB_END (bb))] = bb;
2216 	      if (flags & TDF_BLOCKS)
2217 		{
2218 		  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2219 		    {
2220 		      enum bb_state state = IN_MULTIPLE_BB;
2221 
2222 		      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2223 			state = IN_ONE_BB;
2224 		      in_bb_p[INSN_UID (x)] = state;
2225 
2226 		      if (x == BB_END (bb))
2227 			break;
2228 		    }
2229 		}
2230 	    }
2231 	}
2232 
2233       for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
2234 	{
2235 	  if (flags & TDF_BLOCKS)
2236 	    {
2237 	      bb = start[INSN_UID (tmp_rtx)];
2238 	      if (bb != NULL)
2239 		{
2240 		  dump_bb_info (outf, bb, 0, dump_flags, true, false);
2241 		  if (df && (flags & TDF_DETAILS))
2242 		    df_dump_top (bb, outf);
2243 		}
2244 
2245 	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2246 		  && !NOTE_P (tmp_rtx)
2247 		  && !BARRIER_P (tmp_rtx))
2248 		fprintf (outf, ";; Insn is not within a basic block\n");
2249 	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2250 		fprintf (outf, ";; Insn is in multiple basic blocks\n");
2251 	    }
2252 
2253 	  if (flags & TDF_DETAILS)
2254 	    df_dump_insn_top (tmp_rtx, outf);
2255 	  if (! (flags & TDF_SLIM))
2256 	    print_rtl_single (outf, tmp_rtx);
2257 	  else
2258 	    dump_insn_slim (outf, tmp_rtx);
2259 	  if (flags & TDF_DETAILS)
2260 	    df_dump_insn_bottom (tmp_rtx, outf);
2261 
2262 	  bb = end[INSN_UID (tmp_rtx)];
2263 	  if (bb != NULL)
2264 	    {
2265 	      if (flags & TDF_BLOCKS)
2266 		{
2267 		  dump_bb_info (outf, bb, 0, dump_flags, false, true);
2268 		  if (df && (flags & TDF_DETAILS))
2269 		    df_dump_bottom (bb, outf);
2270 		  putc ('\n', outf);
2271 		}
2272 	      /* Emit a hint if the fallthrough target of the current basic
2273 	         block isn't the one placed immediately after it.  */
2274 	      else if (EDGE_COUNT (bb->succs) > 0)
2275 		{
2276 		  gcc_assert (BB_END (bb) == tmp_rtx);
2277 		  const rtx_insn *ninsn = NEXT_INSN (tmp_rtx);
2278 		  /* Bypass intervening deleted-insn notes and debug insns.  */
2279 		  while (ninsn
2280 			 && !NONDEBUG_INSN_P (ninsn)
2281 			 && !start[INSN_UID (ninsn)])
2282 		    ninsn = NEXT_INSN (ninsn);
2283 		  edge e = find_fallthru_edge (bb->succs);
2284 		  if (e && ninsn)
2285 		    {
2286 		      basic_block dest = e->dest;
2287 		      if (start[INSN_UID (ninsn)] != dest)
2288 			fprintf (outf, "%s      ; pc falls through to BB %d\n",
2289 				 print_rtx_head, dest->index);
2290 		    }
2291 		}
2292 	    }
2293 	}
2294 
2295       free (start);
2296       free (end);
2297       free (in_bb_p);
2298     }
2299 }
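/* Usage sketch (hypothetical helper, compiled out): dump the whole insn
   stream with block annotations and dataflow details, mirroring the
   diagnostic call made from rtl_verify_edges below.  */
#if 0
static void
example_debug_dump_function (void)
{
  print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
}
#endif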
2300 
2301 /* Update the branch probability of BB if a REG_BR_PROB is present.  */
2302 
2303 void
2304 update_br_prob_note (basic_block bb)
2305 {
2306   rtx note;
2307   note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2308   if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2309     {
2310       if (note)
2311 	{
2312 	  rtx *note_link, this_rtx;
2313 
2314 	  note_link = &REG_NOTES (BB_END (bb));
2315 	  for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2316 	    if (this_rtx == note)
2317 	      {
2318 		*note_link = XEXP (this_rtx, 1);
2319 		break;
2320 	      }
2321 	}
2322       return;
2323     }
2324   if (!note
2325       || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2326     return;
2327   XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
2328 }
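/* Usage sketch (hypothetical helper, compiled out): after a pass
   rewrites the out-edge probabilities of a conditional block, the
   REG_BR_PROB note on the jump must be resynced.  Assumes BB ends in a
   conditional jump, so BRANCH_EDGE/FALLTHRU_EDGE are meaningful.  */
#if 0
static void
example_set_branch_probability (basic_block bb, profile_probability p)
{
  BRANCH_EDGE (bb)->probability = p;
  FALLTHRU_EDGE (bb)->probability = p.invert ();
  update_br_prob_note (bb);
}
#endif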
2329 
2330 /* Get the last insn associated with block BB (that includes barriers and
2331    tablejumps after BB).  */
2332 rtx_insn *
2333 get_last_bb_insn (basic_block bb)
2334 {
2335   rtx_jump_table_data *table;
2336   rtx_insn *tmp;
2337   rtx_insn *end = BB_END (bb);
2338 
2339   /* Include any jump table following the basic block.  */
2340   if (tablejump_p (end, NULL, &table))
2341     end = table;
2342 
2343   /* Include any barriers that may follow the basic block.  */
2344   tmp = next_nonnote_nondebug_insn_bb (end);
2345   while (tmp && BARRIER_P (tmp))
2346     {
2347       end = tmp;
2348       tmp = next_nonnote_nondebug_insn_bb (end);
2349     }
2350 
2351   return end;
2352 }
2353 
2354 /* Add all BBs reachable from entry via hot paths into the SET.  */
2355 
2356 void
2357 find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2358 {
2359   auto_vec<basic_block, 64> worklist;
2360 
2361   set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2362   worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2363 
2364   while (worklist.length () > 0)
2365     {
2366       basic_block bb = worklist.pop ();
2367       edge_iterator ei;
2368       edge e;
2369 
2370       FOR_EACH_EDGE (e, ei, bb->succs)
2371 	if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2372 	    && !set->add (e->dest))
2373 	  worklist.safe_push (e->dest);
2374     }
2375 }
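/* Usage sketch (hypothetical helper, compiled out): test whether BB is
   reachable from the entry block only through cold blocks; this is the
   per-block query that find_partition_fixes below performs.  */
#if 0
static bool
example_reached_only_via_cold_p (basic_block bb)
{
  hash_set<basic_block> hot_set;
  find_bbs_reachable_by_hot_paths (&hot_set);
  return !hot_set.contains (bb);
}
#endif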
2376 
2377 /* Sanity check partition hotness to ensure that basic blocks in
2378    the cold partition don't dominate basic blocks in the hot partition.
2379    If FLAG_ONLY is true, report violations as errors. Otherwise
2380    re-mark the dominated blocks as cold, since this is run after
2381    cfg optimizations that may make hot blocks previously reached
2382    by both hot and cold blocks now only reachable along cold paths.  */
2383 
2384 static auto_vec<basic_block>
2385 find_partition_fixes (bool flag_only)
2386 {
2387   basic_block bb;
2388   auto_vec<basic_block> bbs_to_fix;
2389   hash_set<basic_block> set;
2390 
2391   /* Callers check this.  */
2392   gcc_checking_assert (crtl->has_bb_partition);
2393 
2394   find_bbs_reachable_by_hot_paths (&set);
2395 
2396   FOR_EACH_BB_FN (bb, cfun)
2397     if (!set.contains (bb)
2398 	&& BB_PARTITION (bb) != BB_COLD_PARTITION)
2399       {
2400 	if (flag_only)
2401 	  error ("non-cold basic block %d reachable only "
2402 		 "by paths crossing the cold partition", bb->index);
2403 	else
2404 	  BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2405 	bbs_to_fix.safe_push (bb);
2406       }
2407 
2408   return bbs_to_fix;
2409 }
2410 
2411 /* Perform cleanup on the hot/cold bb partitioning after optimization
2412    passes that modify the cfg.  */
2413 
2414 void
2415 fixup_partitions (void)
2416 {
2417   if (!crtl->has_bb_partition)
2418     return;
2419 
2420   /* Delete any blocks that became unreachable and weren't
2421      already cleaned up, for example during edge forwarding
2422      and convert_jumps_to_returns. This will expose more
2423      opportunities for fixing the partition boundaries here.
2424      Also, the calculation of the dominance graph during verification
2425      will assert if there are unreachable nodes.  */
2426   delete_unreachable_blocks ();
2427 
2428   /* If there are partitions, do a sanity check on them: A basic block in
2429      a cold partition cannot dominate a basic block in a hot partition.
2430      Fixup any that now violate this requirement, as a result of edge
2431      forwarding and unreachable block deletion.  */
2432   auto_vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2433 
2434   /* Do the partition fixup after all necessary blocks have been converted to
2435      cold, so that we only update the region crossings in the minimum number
2436      of places, which can require forcing edges to be non-fallthru.  */
2437   if (! bbs_to_fix.is_empty ())
2438     {
2439       do
2440 	{
2441 	  basic_block bb = bbs_to_fix.pop ();
2442 	  fixup_new_cold_bb (bb);
2443 	}
2444       while (! bbs_to_fix.is_empty ());
2445 
2446       /* Fix up hot cold block grouping if needed.  */
2447       if (crtl->bb_reorder_complete && current_ir_type () == IR_RTL_CFGRTL)
2448 	{
2449 	  basic_block bb, first = NULL, second = NULL;
2450 	  int current_partition = BB_UNPARTITIONED;
2451 
2452 	  FOR_EACH_BB_FN (bb, cfun)
2453 	    {
2454 	      if (current_partition != BB_UNPARTITIONED
2455 		  && BB_PARTITION (bb) != current_partition)
2456 		{
2457 		  if (first == NULL)
2458 		    first = bb;
2459 		  else if (second == NULL)
2460 		    second = bb;
2461 		  else
2462 		    {
2463 		      /* If we switch partitions for the 3rd, 5th etc. time,
2464 			 move bbs first (inclusive) .. second (exclusive) right
2465 			 before bb.  */
2466 		      basic_block prev_first = first->prev_bb;
2467 		      basic_block prev_second = second->prev_bb;
2468 		      basic_block prev_bb = bb->prev_bb;
2469 		      prev_first->next_bb = second;
2470 		      second->prev_bb = prev_first;
2471 		      prev_second->next_bb = bb;
2472 		      bb->prev_bb = prev_second;
2473 		      prev_bb->next_bb = first;
2474 		      first->prev_bb = prev_bb;
2475 		      rtx_insn *prev_first_insn = PREV_INSN (BB_HEAD (first));
2476 		      rtx_insn *prev_second_insn
2477 			= PREV_INSN (BB_HEAD (second));
2478 		      rtx_insn *prev_bb_insn = PREV_INSN (BB_HEAD (bb));
2479 		      SET_NEXT_INSN (prev_first_insn) = BB_HEAD (second);
2480 		      SET_PREV_INSN (BB_HEAD (second)) = prev_first_insn;
2481 		      SET_NEXT_INSN (prev_second_insn) = BB_HEAD (bb);
2482 		      SET_PREV_INSN (BB_HEAD (bb)) = prev_second_insn;
2483 		      SET_NEXT_INSN (prev_bb_insn) = BB_HEAD (first);
2484 		      SET_PREV_INSN (BB_HEAD (first)) = prev_bb_insn;
2485 		      second = NULL;
2486 		    }
2487 		}
2488 	      current_partition = BB_PARTITION (bb);
2489 	    }
2490 	  gcc_assert (!second);
2491 	}
2492     }
2493 }
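/* Usage sketch (hypothetical helper, compiled out): passes that modify
   the CFG repair the hot/cold partitioning before verifying, exactly as
   commit_edge_insertions does above.  */
#if 0
static void
example_after_cfg_transform (void)
{
  fixup_partitions ();
  checking_verify_flow_info ();
}
#endif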
2494 
2495 /* Verify, in the basic block chain, that there is at most one switch
2496    between hot/cold partitions. This condition will not be true until
2497    after reorder_basic_blocks is called.  */
2498 
2499 static int
2500 verify_hot_cold_block_grouping (void)
2501 {
2502   basic_block bb;
2503   int err = 0;
2504   bool switched_sections = false;
2505   int current_partition = BB_UNPARTITIONED;
2506 
2507   /* Even after bb reordering is complete, we go into cfglayout mode
2508      again (in compgoto). Ensure we don't call this before going back
2509      into linearized RTL when any layout fixes would have been committed.  */
2510   if (!crtl->bb_reorder_complete
2511       || current_ir_type () != IR_RTL_CFGRTL)
2512     return err;
2513 
2514   FOR_EACH_BB_FN (bb, cfun)
2515     {
2516       if (current_partition != BB_UNPARTITIONED
2517           && BB_PARTITION (bb) != current_partition)
2518 	{
2519 	  if (switched_sections)
2520 	    {
2521 	      error ("multiple hot/cold transitions found (bb %i)",
2522 		     bb->index);
2523 	      err = 1;
2524 	    }
2525 	  else
2526             switched_sections = true;
2527 
2528           if (!crtl->has_bb_partition)
2529             error ("partition found but function partition flag not set");
2530 	}
2531       current_partition = BB_PARTITION (bb);
2532     }
2533 
2534   return err;
2535 }
2536 
2537 
2538 /* Perform several checks on the edges out of each block, such as
2539    the consistency of the branch probabilities, the correctness
2540    of hot/cold partition crossing edges, and the number of expected
2541    successor edges.  Also verify that the dominance relationship
2542    between hot/cold blocks is sane.  */
2543 
2544 static int
2545 rtl_verify_edges (void)
2546 {
2547   int err = 0;
2548   basic_block bb;
2549 
2550   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2551     {
2552       int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2553       int n_eh = 0, n_abnormal = 0;
2554       edge e, fallthru = NULL;
2555       edge_iterator ei;
2556       rtx note;
2557       bool has_crossing_edge = false;
2558 
2559       if (JUMP_P (BB_END (bb))
2560 	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2561 	  && EDGE_COUNT (bb->succs) >= 2
2562 	  && any_condjump_p (BB_END (bb)))
2563 	{
2564 	  if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2565 	    {
2566 	      if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
2567 		{
2568 		  error ("verify_flow_info: "
2569 			 "REG_BR_PROB is set but cfg probability is not");
2570 		  err = 1;
2571 		}
2572 	    }
2573 	  else if (XINT (note, 0)
2574 	           != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
2575 	           && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2576 	    {
2577 	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2578 		     XINT (note, 0),
2579 		     BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
2580 	      err = 1;
2581 	    }
2582 	}
2583 
2584       FOR_EACH_EDGE (e, ei, bb->succs)
2585 	{
2586 	  bool is_crossing;
2587 
2588 	  if (e->flags & EDGE_FALLTHRU)
2589 	    n_fallthru++, fallthru = e;
2590 
2591 	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2592 			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2593 			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2594           has_crossing_edge |= is_crossing;
2595 	  if (e->flags & EDGE_CROSSING)
2596 	    {
2597 	      if (!is_crossing)
2598 		{
2599 		  error ("EDGE_CROSSING incorrectly set across same section");
2600 		  err = 1;
2601 		}
2602 	      if (e->flags & EDGE_FALLTHRU)
2603 		{
2604 		  error ("fallthru edge crosses section boundary in bb %i",
2605 			 e->src->index);
2606 		  err = 1;
2607 		}
2608 	      if (e->flags & EDGE_EH)
2609 		{
2610 		  error ("EH edge crosses section boundary in bb %i",
2611 			 e->src->index);
2612 		  err = 1;
2613 		}
2614               if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2615 		{
2616 		  error ("No region crossing jump at section boundary in bb %i",
2617 			 bb->index);
2618 		  err = 1;
2619 		}
2620 	    }
2621 	  else if (is_crossing)
2622 	    {
2623 	      error ("EDGE_CROSSING missing across section boundary");
2624 	      err = 1;
2625 	    }
2626 
2627 	  if ((e->flags & ~(EDGE_DFS_BACK
2628 			    | EDGE_CAN_FALLTHRU
2629 			    | EDGE_IRREDUCIBLE_LOOP
2630 			    | EDGE_LOOP_EXIT
2631 			    | EDGE_CROSSING
2632 			    | EDGE_PRESERVE)) == 0)
2633 	    n_branch++;
2634 
2635 	  if (e->flags & EDGE_ABNORMAL_CALL)
2636 	    n_abnormal_call++;
2637 
2638 	  if (e->flags & EDGE_SIBCALL)
2639 	    n_sibcall++;
2640 
2641 	  if (e->flags & EDGE_EH)
2642 	    n_eh++;
2643 
2644 	  if (e->flags & EDGE_ABNORMAL)
2645 	    n_abnormal++;
2646 	}
2647 
2648       if (!has_crossing_edge
2649 	  && JUMP_P (BB_END (bb))
2650 	  && CROSSING_JUMP_P (BB_END (bb)))
2651 	{
2652 	  print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
2653 	  error ("Region crossing jump across same section in bb %i",
2654 		 bb->index);
2655 	  err = 1;
2656 	}
2657 
2658       if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2659 	{
2660 	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2661 	  err = 1;
2662 	}
2663       if (n_eh > 1)
2664 	{
2665 	  error ("too many exception handling edges in bb %i", bb->index);
2666 	  err = 1;
2667 	}
2668       if (n_branch
2669 	  && (!JUMP_P (BB_END (bb))
2670 	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2671 				   || any_condjump_p (BB_END (bb))))))
2672 	{
2673 	  error ("too many outgoing branch edges from bb %i", bb->index);
2674 	  err = 1;
2675 	}
2676       if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2677 	{
2678 	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
2679 	  err = 1;
2680 	}
2681       if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2682 	{
2683 	  error ("wrong number of branch edges after unconditional jump"
2684 		 " in bb %i", bb->index);
2685 	  err = 1;
2686 	}
2687       if (n_branch != 1 && any_condjump_p (BB_END (bb))
2688 	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2689 	{
2690 	  error ("wrong amount of branch edges after conditional jump"
2691 		 " in bb %i", bb->index);
2692 	  err = 1;
2693 	}
2694       if (n_abnormal_call && !CALL_P (BB_END (bb)))
2695 	{
2696 	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
2697 	  err = 1;
2698 	}
2699       if (n_sibcall && !CALL_P (BB_END (bb)))
2700 	{
2701 	  error ("sibcall edges for non-call insn in bb %i", bb->index);
2702 	  err = 1;
2703 	}
2704       if (n_abnormal > n_eh
2705 	  && !(CALL_P (BB_END (bb))
2706 	       && n_abnormal == n_abnormal_call + n_sibcall)
2707 	  && (!JUMP_P (BB_END (bb))
2708 	      || any_condjump_p (BB_END (bb))
2709 	      || any_uncondjump_p (BB_END (bb))))
2710 	{
2711 	  error ("abnormal edges for no purpose in bb %i", bb->index);
2712 	  err = 1;
2713 	}
2714 
2715       int has_eh = -1;
2716       FOR_EACH_EDGE (e, ei, bb->preds)
2717 	{
2718 	  if (has_eh == -1)
2719 	    has_eh = (e->flags & EDGE_EH);
2720 	  if ((e->flags & EDGE_EH) == has_eh)
2721 	    continue;
2722 	  error ("EH incoming edge mixed with non-EH incoming edges "
2723 		 "in bb %i", bb->index);
2724 	  err = 1;
2725 	  break;
2726 	}
2727     }
2728 
2729   /* If there are partitions, do a sanity check on them: A basic block in
2730      a cold partition cannot dominate a basic block in a hot partition.  */
2731   if (crtl->has_bb_partition && !err
2732       && current_ir_type () == IR_RTL_CFGLAYOUT)
2733     {
2734       auto_vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2735       err = !bbs_to_fix.is_empty ();
2736     }
2737 
2738   /* Clean up.  */
2739   return err;
2740 }
2741 
2742 /* Checks on the instructions within blocks. Currently checks that each
2743    block starts with a basic block note, and that basic block notes and
2744    control flow jumps are not found in the middle of the block.  */
2745 
2746 static int
2747 rtl_verify_bb_insns (void)
2748 {
2749   rtx_insn *x;
2750   int err = 0;
2751   basic_block bb;
2752 
2753   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2754     {
2755       /* Now check the header of the basic
2756 	 block.  It ought to contain an optional CODE_LABEL followed
2757 	 by a NOTE_BASIC_BLOCK.  */
2758       x = BB_HEAD (bb);
2759       if (LABEL_P (x))
2760 	{
2761 	  if (BB_END (bb) == x)
2762 	    {
2763 	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2764 		     bb->index);
2765 	      err = 1;
2766 	    }
2767 
2768 	  x = NEXT_INSN (x);
2769 	}
2770 
2771       if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2772 	{
2773 	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2774 		 bb->index);
2775 	  err = 1;
2776 	}
2777 
2778       if (BB_END (bb) == x)
2779 	/* Do checks for empty blocks here.  */
2780 	;
2781       else
2782 	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2783 	  {
2784 	    if (NOTE_INSN_BASIC_BLOCK_P (x))
2785 	      {
2786 		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2787 		       INSN_UID (x), bb->index);
2788 		err = 1;
2789 	      }
2790 
2791 	    if (x == BB_END (bb))
2792 	      break;
2793 
2794 	    if (control_flow_insn_p (x))
2795 	      {
2796 		error ("in basic block %d:", bb->index);
2797 		fatal_insn ("flow control insn inside a basic block", x);
2798 	      }
2799 	  }
2800     }
2801 
2802   /* Clean up.  */
2803   return err;
2804 }
2805 
2806 /* Verify that block pointers for instructions in basic blocks, headers and
2807    footers are set appropriately.  */
2808 
2809 static int
2810 rtl_verify_bb_pointers (void)
2811 {
2812   int err = 0;
2813   basic_block bb;
2814 
2815   /* Check the general integrity of the basic blocks.  */
2816   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2817     {
2818       rtx_insn *insn;
2819 
2820       if (!(bb->flags & BB_RTL))
2821 	{
2822 	  error ("BB_RTL flag not set for block %d", bb->index);
2823 	  err = 1;
2824 	}
2825 
2826       FOR_BB_INSNS (bb, insn)
2827 	if (BLOCK_FOR_INSN (insn) != bb)
2828 	  {
2829 	    error ("insn %d basic block pointer is %d, should be %d",
2830 		   INSN_UID (insn),
2831 		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2832 		   bb->index);
2833 	    err = 1;
2834 	  }
2835 
2836       for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2837 	if (!BARRIER_P (insn)
2838 	    && BLOCK_FOR_INSN (insn) != NULL)
2839 	  {
2840 	    error ("insn %d in header of bb %d has non-NULL basic block",
2841 		   INSN_UID (insn), bb->index);
2842 	    err = 1;
2843 	  }
2844       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2845 	if (!BARRIER_P (insn)
2846 	    && BLOCK_FOR_INSN (insn) != NULL)
2847 	  {
2848 	    error ("insn %d in footer of bb %d has non-NULL basic block",
2849 		   INSN_UID (insn), bb->index);
2850 	    err = 1;
2851 	  }
2852     }
2853 
2854   /* Clean up.  */
2855   return err;
2856 }
2857 
2858 /* Verify the CFG and RTL consistency common for both underlying RTL and
2859    cfglayout RTL.
2860 
2861    Currently it does the following checks:
2862 
2863    - overlapping of basic blocks
2864    - insns with wrong BLOCK_FOR_INSN pointers
2865    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2866    - tails of basic blocks (ensure that boundary is necessary)
2867    - scans body of the basic block for JUMP_INSN, CODE_LABEL
2868      and NOTE_INSN_BASIC_BLOCK
2869    - verify that no fall_thru edge crosses hot/cold partition boundaries
2870    - verify that there are no pending RTL branch predictions
2871    - verify that hot blocks are not dominated by cold blocks
2872 
2873    In the future it can be extended to check a lot of other things as well
2874    (reachability of basic blocks, life information, etc.).  */
2875 
2876 static int
2877 rtl_verify_flow_info_1 (void)
2878 {
2879   int err = 0;
2880 
2881   err |= rtl_verify_bb_pointers ();
2882 
2883   err |= rtl_verify_bb_insns ();
2884 
2885   err |= rtl_verify_edges ();
2886 
2887   return err;
2888 }
2889 
2890 /* Walk the instruction chain and verify that bb head/end pointers
2891   are correct, and that instructions are in exactly one bb and have
2892   correct block pointers.  */
2893 
2894 static int
2895 rtl_verify_bb_insn_chain (void)
2896 {
2897   basic_block bb;
2898   int err = 0;
2899   rtx_insn *x;
2900   rtx_insn *last_head = get_last_insn ();
2901   basic_block *bb_info;
2902   const int max_uid = get_max_uid ();
2903 
2904   bb_info = XCNEWVEC (basic_block, max_uid);
2905 
2906   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2907     {
2908       rtx_insn *head = BB_HEAD (bb);
2909       rtx_insn *end = BB_END (bb);
2910 
2911       for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2912 	{
2913 	  /* Verify the end of the basic block is in the INSN chain.  */
2914 	  if (x == end)
2915 	    break;
2916 
2917             /* And that the code outside of basic blocks has NULL bb field.  */
2918           if (!BARRIER_P (x)
2919               && BLOCK_FOR_INSN (x) != NULL)
2920             {
2921               error ("insn %d outside of basic blocks has non-NULL bb field",
2922                      INSN_UID (x));
2923               err = 1;
2924             }
2925 	}
2926 
2927       if (!x)
2928 	{
2929 	  error ("end insn %d for block %d not found in the insn stream",
2930 		 INSN_UID (end), bb->index);
2931 	  err = 1;
2932 	}
2933 
2934       /* Work backwards from the end to the head of the basic block
2935 	 to verify the head is in the RTL chain.  */
2936       for (; x != NULL_RTX; x = PREV_INSN (x))
2937 	{
2938 	  /* While walking over the insn chain, verify insns appear
2939 	     in only one basic block.  */
2940 	  if (bb_info[INSN_UID (x)] != NULL)
2941 	    {
2942 	      error ("insn %d is in multiple basic blocks (%d and %d)",
2943 		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2944 	      err = 1;
2945 	    }
2946 
2947 	  bb_info[INSN_UID (x)] = bb;
2948 
2949 	  if (x == head)
2950 	    break;
2951 	}
2952       if (!x)
2953 	{
2954 	  error ("head insn %d for block %d not found in the insn stream",
2955 		 INSN_UID (head), bb->index);
2956 	  err = 1;
2957 	}
2958 
2959       last_head = PREV_INSN (x);
2960     }
2961 
2962   for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2963     {
2964       /* Check that the code before the first basic block has NULL
2965 	 bb field.  */
2966       if (!BARRIER_P (x)
2967 	  && BLOCK_FOR_INSN (x) != NULL)
2968 	{
2969 	  error ("insn %d outside of basic blocks has non-NULL bb field",
2970 		 INSN_UID (x));
2971 	  err = 1;
2972 	}
2973     }
2974   free (bb_info);
2975 
2976   return err;
2977 }
2978 
2979 /* Verify that fallthru edges point to adjacent blocks in layout order and
2980    that barriers exist after non-fallthru blocks.  */
2981 
2982 static int
2983 rtl_verify_fallthru (void)
2984 {
2985   basic_block bb;
2986   int err = 0;
2987 
2988   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2989     {
2990       edge e;
2991 
2992       e = find_fallthru_edge (bb->succs);
2993       if (!e)
2994 	{
2995 	  rtx_insn *insn;
2996 
2997 	  /* Ensure existence of barrier in BB with no fallthru edges.  */
2998 	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2999 	    {
3000 	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
3001 		{
3002 		  error ("missing barrier after block %i", bb->index);
3003 		  err = 1;
3004 		  break;
3005 		}
3006 	      if (BARRIER_P (insn))
3007 		break;
3008 	    }
3009 	}
3010       else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
3011 	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3012 	{
3013 	  rtx_insn *insn;
3014 
3015 	  if (e->src->next_bb != e->dest)
3016 	    {
3017 	      error
3018 		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
3019 		 e->src->index, e->dest->index);
3020 	      err = 1;
3021 	    }
3022 	  else
3023 	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
3024 		 insn = NEXT_INSN (insn))
3025 	      if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
3026 		{
3027 		  error ("verify_flow_info: Incorrect fallthru %i->%i",
3028 			 e->src->index, e->dest->index);
3029 		  error ("wrong insn in the fallthru edge");
3030 		  debug_rtx (insn);
3031 		  err = 1;
3032 		}
3033 	}
3034     }
3035 
3036    return err;
3037 }
3038 
3039 /* Verify that blocks are laid out in consecutive order. While walking the
3040    instructions, verify that all expected instructions are inside the basic
3041    blocks, and that all returns are followed by barriers.  */
3042 
3043 static int
3044 rtl_verify_bb_layout (void)
3045 {
3046   basic_block bb;
3047   int err = 0;
3048   rtx_insn *x, *y;
3049   int num_bb_notes;
3050   rtx_insn * const rtx_first = get_insns ();
3051   basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
3052 
3053   num_bb_notes = 0;
3054 
3055   for (x = rtx_first; x; x = NEXT_INSN (x))
3056     {
3057       if (NOTE_INSN_BASIC_BLOCK_P (x))
3058 	{
3059 	  bb = NOTE_BASIC_BLOCK (x);
3060 
3061 	  num_bb_notes++;
3062 	  if (bb != last_bb_seen->next_bb)
3063 	    internal_error ("basic blocks not laid down consecutively");
3064 
3065 	  curr_bb = last_bb_seen = bb;
3066 	}
3067 
3068       if (!curr_bb)
3069 	{
3070 	  switch (GET_CODE (x))
3071 	    {
3072 	    case BARRIER:
3073 	    case NOTE:
3074 	      break;
3075 
3076 	    case CODE_LABEL:
3077 	      /* An ADDR_VEC is placed outside any basic block.  */
3078 	      if (NEXT_INSN (x)
3079 		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
3080 		x = NEXT_INSN (x);
3081 
3082 	      /* But in any case, non-deletable labels can appear anywhere.  */
3083 	      break;
3084 
3085 	    default:
3086 	      fatal_insn ("insn outside basic block", x);
3087 	    }
3088 	}
3089 
3090       if (JUMP_P (x)
3091 	  && returnjump_p (x) && ! condjump_p (x)
3092 	  && ! ((y = next_nonnote_nondebug_insn (x))
3093 		&& BARRIER_P (y)))
3094 	    fatal_insn ("return not followed by barrier", x);
3095 
3096       if (curr_bb && x == BB_END (curr_bb))
3097 	curr_bb = NULL;
3098     }
3099 
3100   if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
3101     internal_error
3102       ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
3103        num_bb_notes, n_basic_blocks_for_fn (cfun));
3104 
3105    return err;
3106 }
3107 
3108 /* Verify the CFG and RTL consistency common for both underlying RTL and
3109    cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3110 
3111    Currently it does the following checks:
3112    - all checks of rtl_verify_flow_info_1
3113    - test head/end pointers
3114    - check that blocks are laid out in consecutive order
3115    - check that all insns are in the basic blocks
3116      (except the switch handling code, barriers and notes)
3117    - check that all returns are followed by barriers
3118    - check that all fallthru edges point to adjacent blocks
3119    - verify that there is a single hot/cold partition boundary after bbro  */
3120 
3121 static int
3122 rtl_verify_flow_info (void)
3123 {
3124   int err = 0;
3125 
3126   err |= rtl_verify_flow_info_1 ();
3127 
3128   err |= rtl_verify_bb_insn_chain ();
3129 
3130   err |= rtl_verify_fallthru ();
3131 
3132   err |= rtl_verify_bb_layout ();
3133 
3134   err |= verify_hot_cold_block_grouping ();
3135 
3136   return err;
3137 }
3138 
3139 /* Assume that the preceding pass has possibly eliminated jump instructions
3140    or converted the unconditional jumps.  Eliminate the dead edges from the CFG.
3141    Return true if any edges are eliminated.  */
3142 
3143 bool
3144 purge_dead_edges (basic_block bb)
3145 {
3146   edge e;
3147   rtx_insn *insn = BB_END (bb);
3148   rtx note;
3149   bool purged = false;
3150   bool found;
3151   edge_iterator ei;
3152 
3153   if ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb))
3154     do
3155       insn = PREV_INSN (insn);
3156     while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3157 
3158   /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
3159   if (NONJUMP_INSN_P (insn)
3160       && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3161     {
3162       rtx eqnote;
3163 
3164       if (! may_trap_p (PATTERN (insn))
3165 	  || ((eqnote = find_reg_equal_equiv_note (insn))
3166 	      && ! may_trap_p (XEXP (eqnote, 0))))
3167 	remove_note (insn, note);
3168     }
3169 
3170   /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
3171   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3172     {
3173       bool remove = false;
3174 
3175       /* There are three types of edges we need to handle correctly here: EH
3176 	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
3177 	 latter can appear when nonlocal gotos are used.  */
3178       if (e->flags & EDGE_ABNORMAL_CALL)
3179 	{
3180 	  if (!CALL_P (insn))
3181 	    remove = true;
3182 	  else if (can_nonlocal_goto (insn))
3183 	    ;
3184 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3185 	    ;
3186 	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3187 	    ;
3188 	  else
3189 	    remove = true;
3190 	}
3191       else if (e->flags & EDGE_EH)
3192 	remove = !can_throw_internal (insn);
3193 
3194       if (remove)
3195 	{
3196 	  remove_edge (e);
3197 	  df_set_bb_dirty (bb);
3198 	  purged = true;
3199 	}
3200       else
3201 	ei_next (&ei);
3202     }
3203 
3204   if (JUMP_P (insn))
3205     {
3206       rtx note;
3207       edge b,f;
3208       edge_iterator ei;
3209 
3210       /* We only care about conditional jumps, return jumps and simplejumps.  */
3211       if (!any_condjump_p (insn)
3212 	  && !returnjump_p (insn)
3213 	  && !simplejump_p (insn))
3214 	return purged;
3215 
3216       /* Branch probability/prediction notes are defined only for
3217 	 condjumps.  We've possibly turned a condjump into a simplejump.  */
3218       if (simplejump_p (insn))
3219 	{
3220 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
3221 	  if (note)
3222 	    remove_note (insn, note);
3223 	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3224 	    remove_note (insn, note);
3225 	}
3226 
3227       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3228 	{
3229 	  /* Don't let abnormal flags leak from computed jumps turned
3230 	     into simplejumps.  */
3231 
3232 	  e->flags &= ~EDGE_ABNORMAL;
3233 
3234 	  /* See if this edge is one we should keep.  */
3235 	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3236 	    /* A conditional jump can fall through into the next
3237 	       block, so we should keep the edge.  */
3238 	    {
3239 	      ei_next (&ei);
3240 	      continue;
3241 	    }
3242 	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3243 		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3244 	    /* If the destination block is the target of the jump,
3245 	       keep the edge.  */
3246 	    {
3247 	      ei_next (&ei);
3248 	      continue;
3249 	    }
3250 	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3251 		   && returnjump_p (insn))
3252 	    /* If the destination block is the exit block, and this
3253 	       instruction is a return, then keep the edge.  */
3254 	    {
3255 	      ei_next (&ei);
3256 	      continue;
3257 	    }
3258 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3259 	    /* Keep the edges that correspond to exceptions thrown by
3260 	       this instruction and rematerialize the EDGE_ABNORMAL
3261 	       flag we just cleared above.  */
3262 	    {
3263 	      e->flags |= EDGE_ABNORMAL;
3264 	      ei_next (&ei);
3265 	      continue;
3266 	    }
3267 
3268 	  /* We do not need this edge.  */
3269 	  df_set_bb_dirty (bb);
3270 	  purged = true;
3271 	  remove_edge (e);
3272 	}
3273 
3274       if (EDGE_COUNT (bb->succs) == 0 || !purged)
3275 	return purged;
3276 
3277       if (dump_file)
3278 	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3279 
3280       if (!optimize)
3281 	return purged;
3282 
3283       /* Redistribute probabilities.  */
3284       if (single_succ_p (bb))
3285 	{
3286 	  single_succ_edge (bb)->probability = profile_probability::always ();
3287 	}
3288       else
3289 	{
3290 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
3291 	  if (!note)
3292 	    return purged;
3293 
3294 	  b = BRANCH_EDGE (bb);
3295 	  f = FALLTHRU_EDGE (bb);
3296 	  b->probability = profile_probability::from_reg_br_prob_note
3297 					 (XINT (note, 0));
3298 	  f->probability = b->probability.invert ();
3299 	}
3300 
3301       return purged;
3302     }
3303   else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3304     {
3305       /* First, there should not be any EH or ABCALL edges resulting
3306 	 from non-local gotos and the like.  If there were, we shouldn't
3307 	 have created the sibcall in the first place.  Second, there
3308 	 should of course never have been a fallthru edge.  */
3309       gcc_assert (single_succ_p (bb));
3310       gcc_assert (single_succ_edge (bb)->flags
3311 		  == (EDGE_SIBCALL | EDGE_ABNORMAL));
3312 
3313       return false;
3314     }
3315 
3316   /* If we don't see a jump insn, we don't know exactly why the block would
3317      have been broken at this point.  Look for a simple, non-fallthru edge,
3318      as these are only created by conditional branches.  If we find such an
3319      edge we know that there used to be a jump here and can then safely
3320      remove all non-fallthru edges.  */
3321   found = false;
3322   FOR_EACH_EDGE (e, ei, bb->succs)
3323     if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3324       {
3325 	found = true;
3326 	break;
3327       }
3328 
3329   if (!found)
3330     return purged;
3331 
3332   /* Remove all but the fake and fallthru edges.  The fake edge may be
3333      the only successor for this block in the case of noreturn
3334      calls.  */
3335   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3336     {
3337       if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3338 	{
3339 	  df_set_bb_dirty (bb);
3340 	  remove_edge (e);
3341 	  purged = true;
3342 	}
3343       else
3344 	ei_next (&ei);
3345     }
3346 
3347   gcc_assert (single_succ_p (bb));
3348 
3349   single_succ_edge (bb)->probability = profile_probability::always ();
3350 
3351   if (dump_file)
3352     fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3353 	     bb->index);
3354   return purged;
3355 }
3356 
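/* Editor's note: a minimal usage sketch, not part of cfgrtl.cc.  The
   typical caller of purge_dead_edges is a pass that has just rewritten
   or deleted the jump ending a block and needs the CFG to match the new
   insn stream; the function name below is hypothetical.  */
#if 0 /* Illustration only.  */
static bool
example_after_jump_simplification (basic_block bb)
{
  /* Drop any successor edges the rewritten jump can no longer take.
     purge_dead_edges itself marks BB dirty for dataflow when it
     removes an edge, and returns true in that case.  */
  return purge_dead_edges (bb);
}
#endif
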
3357 /* Search all basic blocks for potentially dead edges and purge them.  Return
3358    true if some edge has been eliminated.  */
3359 
3360 bool
3361 purge_all_dead_edges (void)
3362 {
3363   int purged = false;
3364   basic_block bb;
3365 
3366   FOR_EACH_BB_FN (bb, cfun)
3367     {
3368       bool purged_here = purge_dead_edges (bb);
3369 
3370       purged |= purged_here;
3371     }
3372 
3373   return purged;
3374 }
3375 
3376 /* This is used by a few passes that emit some instructions after abnormal
3377    calls, moving the basic block's end, while they in fact do want to emit
3378    them on the fallthru edge.  Look for abnormal call edges, search backward
3379    for the call in the block, and insert the instructions on the edge instead.
3380 
3381    Similarly, handle instructions throwing exceptions internally.
3382 
3383    Return true when instructions have been found and inserted on edges.  */
3384 
3385 bool
3386 fixup_abnormal_edges (void)
3387 {
3388   bool inserted = false;
3389   basic_block bb;
3390 
3391   FOR_EACH_BB_FN (bb, cfun)
3392     {
3393       edge e;
3394       edge_iterator ei;
3395 
3396       /* Look for cases we are interested in - calls or instructions causing
3397          exceptions.  */
3398       FOR_EACH_EDGE (e, ei, bb->succs)
3399 	if ((e->flags & EDGE_ABNORMAL_CALL)
3400 	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3401 		== (EDGE_ABNORMAL | EDGE_EH)))
3402 	  break;
3403 
3404       if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3405 	{
3406 	  rtx_insn *insn;
3407 
3408 	  /* Get past the new insns generated.  Allow notes, as the insns
3409 	     may already be deleted.  */
3410 	  insn = BB_END (bb);
3411 	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3412 		 && !can_throw_internal (insn)
3413 		 && insn != BB_HEAD (bb))
3414 	    insn = PREV_INSN (insn);
3415 
3416 	  if (CALL_P (insn) || can_throw_internal (insn))
3417 	    {
3418 	      rtx_insn *stop, *next;
3419 
3420 	      e = find_fallthru_edge (bb->succs);
3421 
3422 	      stop = NEXT_INSN (BB_END (bb));
3423 	      BB_END (bb) = insn;
3424 
3425 	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3426 		{
3427 		  next = NEXT_INSN (insn);
3428 		  if (INSN_P (insn))
3429 		    {
3430 		      delete_insn (insn);
3431 
3432 		      /* Sometimes there's still the return value USE.
3433 			 If it's placed after a trapping call (i.e. that
3434 			 call is the last insn anyway), we have no fallthru
3435 			 edge.  Simply delete this use and don't try to insert
3436 			 on the non-existent edge.
3437 			 Similarly, sometimes a call that can throw is
3438 			 followed in the source with __builtin_unreachable (),
3439 			 meaning that there is UB if the call returns rather
3440 			 than throws.  If there weren't any instructions
3441 			 following such calls before, supposedly even the ones
3442 			 we've deleted aren't significant and can be
3443 			 removed.  */
3444 		      if (e)
3445 			{
3446 			  /* We're not deleting it, we're moving it.  */
3447 			  insn->set_undeleted ();
3448 			  SET_PREV_INSN (insn) = NULL_RTX;
3449 			  SET_NEXT_INSN (insn) = NULL_RTX;
3450 
3451 			  insert_insn_on_edge (insn, e);
3452 			  inserted = true;
3453 			}
3454 		    }
3455 		  else if (!BARRIER_P (insn))
3456 		    set_block_for_insn (insn, NULL);
3457 		}
3458 	    }
3459 
3460 	  /* It may be that we don't find any trapping insn.  In this
3461 	     case we discovered quite late that the insn that had been
3462 	     marked as can_throw_internal in fact couldn't trap at all.
3463 	     So we should in fact delete the EH edges out of the block.  */
3464 	  else
3465 	    purge_dead_edges (bb);
3466 	}
3467     }
3468 
3469   return inserted;
3470 }
3471 
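/* Editor's note: an illustration of the protocol above, not part of
   cfgrtl.cc.  fixup_abnormal_edges only queues the moved insns on the
   fallthru edge; a caller along these (hypothetical) lines must commit
   the queued insertions before the insns reappear in the stream.  */
#if 0 /* Illustration only.  */
if (fixup_abnormal_edges ())
  commit_edge_insertions ();
#endif
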
3472 /* Delete the unconditional jump INSN and adjust the CFG correspondingly.
3473    Note that the INSN should be deleted *after* removing dead edges, so
3474    that the kept edge is the fallthrough edge for a (set (pc) (pc))
3475    but not for a (set (pc) (label_ref FOO)).  */
3476 
3477 void
3478 update_cfg_for_uncondjump (rtx_insn *insn)
3479 {
3480   basic_block bb = BLOCK_FOR_INSN (insn);
3481   gcc_assert (BB_END (bb) == insn);
3482 
3483   purge_dead_edges (bb);
3484 
3485   if (current_ir_type () != IR_RTL_CFGLAYOUT)
3486     {
3487       if (!find_fallthru_edge (bb->succs))
3488 	{
3489 	  auto barrier = next_nonnote_nondebug_insn (insn);
3490 	  if (!barrier || !BARRIER_P (barrier))
3491 	    emit_barrier_after (insn);
3492 	}
3493       return;
3494     }
3495 
3496   delete_insn (insn);
3497   if (EDGE_COUNT (bb->succs) == 1)
3498     {
3499       rtx_insn *insn;
3500 
3501       single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3502 
3503       /* Remove barriers from the footer if there are any.  */
3504       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
3505 	if (BARRIER_P (insn))
3506 	  {
3507 	    if (PREV_INSN (insn))
3508 	      SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
3509 	    else
3510 	      BB_FOOTER (bb) = NEXT_INSN (insn);
3511 	    if (NEXT_INSN (insn))
3512 	      SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
3513 	  }
3514 	else if (LABEL_P (insn))
3515 	  break;
3516     }
3517 }
3518 
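/* Editor's note: an illustration of the deletion-order constraint
   documented before update_cfg_for_uncondjump above; not part of
   cfgrtl.cc.  With the jump still in the stream, purge_dead_edges can
   classify the edge that survives:

     (jump_insn (set (pc) (pc)))             ; jump to the next insn --
                                             ; the kept edge is the fallthru
     (jump_insn (set (pc) (label_ref FOO)))  ; real jump -- the kept edge
                                             ; is the branch edge

   Deleting the jump first would lose that distinction.  */
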
3519 /* Cut the insns from FIRST to LAST out of the insn stream.  */
3520 
3521 rtx_insn *
3522 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3523 {
3524   rtx_insn *prevfirst = PREV_INSN (first);
3525   rtx_insn *nextlast = NEXT_INSN (last);
3526 
3527   SET_PREV_INSN (first) = NULL;
3528   SET_NEXT_INSN (last) = NULL;
3529   if (prevfirst)
3530     SET_NEXT_INSN (prevfirst) = nextlast;
3531   if (nextlast)
3532     SET_PREV_INSN (nextlast) = prevfirst;
3533   else
3534     set_last_insn (prevfirst);
3535   if (!prevfirst)
3536     set_first_insn (nextlast);
3537   return first;
3538 }
3539 
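/* Editor's note: a minimal sketch of the intended use, not part of
   cfgrtl.cc.  FIRST and LAST are hypothetical insns delimiting a span;
   record_effective_endpoints below uses exactly this idiom to detach
   block headers and footers.  */
#if 0 /* Illustration only.  */
/* Detach [first, last] from the global chain; the span keeps its own
   internal NEXT/PREV links and can be spliced back in elsewhere.  */
rtx_insn *span = unlink_insn_chain (first, last);
#endif
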
3540 /* Skip over inter-block insns occurring after BB which are typically
3541    associated with BB (e.g., barriers). If there are any such insns,
3542    we return the last one. Otherwise, we return the end of BB.  */
3543 
3544 static rtx_insn *
3545 skip_insns_after_block (basic_block bb)
3546 {
3547   rtx_insn *insn, *last_insn, *next_head, *prev;
3548 
3549   next_head = NULL;
3550   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3551     next_head = BB_HEAD (bb->next_bb);
3552 
3553   for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3554     {
3555       if (insn == next_head)
3556 	break;
3557 
3558       switch (GET_CODE (insn))
3559 	{
3560 	case BARRIER:
3561 	  last_insn = insn;
3562 	  continue;
3563 
3564 	case NOTE:
3565 	  gcc_assert (NOTE_KIND (insn) != NOTE_INSN_BLOCK_END);
3566 	  continue;
3567 
3568 	case CODE_LABEL:
3569 	  if (NEXT_INSN (insn)
3570 	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3571 	    {
3572 	      insn = NEXT_INSN (insn);
3573 	      last_insn = insn;
3574 	      continue;
3575 	    }
3576 	  break;
3577 
3578 	default:
3579 	  break;
3580 	}
3581 
3582       break;
3583     }
3584 
3585   /* It is possible to hit a contradictory sequence.  For instance:
3586 
3587      jump_insn
3588      NOTE_INSN_BLOCK_BEG
3589      barrier
3590 
3591      Here the barrier belongs to jump_insn, but the note does not.  This
3592      can be created by removing the basic block originally following
3593      NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */
3594 
3595   for (insn = last_insn; insn != BB_END (bb); insn = prev)
3596     {
3597       prev = PREV_INSN (insn);
3598       if (NOTE_P (insn))
3599 	switch (NOTE_KIND (insn))
3600 	  {
3601 	  case NOTE_INSN_BLOCK_END:
3602 	    gcc_unreachable ();
3603 	    break;
3604 	  case NOTE_INSN_DELETED:
3605 	  case NOTE_INSN_DELETED_LABEL:
3606 	  case NOTE_INSN_DELETED_DEBUG_LABEL:
3607 	    continue;
3608 	  default:
3609 	    reorder_insns (insn, insn, last_insn);
3610 	  }
3611     }
3612 
3613   return last_insn;
3614 }
3615 
3616 /* Locate or create a label for a given basic block.  */
3617 
3618 static rtx_insn *
3619 label_for_bb (basic_block bb)
3620 {
3621   rtx_insn *label = BB_HEAD (bb);
3622 
3623   if (!LABEL_P (label))
3624     {
3625       if (dump_file)
3626 	fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3627 
3628       label = block_label (bb);
3629     }
3630 
3631   return label;
3632 }
3633 
3634 /* Locate the effective beginning and end of the insn chain for each
3635    block, as defined by skip_insns_after_block above.  */
3636 
3637 static void
3638 record_effective_endpoints (void)
3639 {
3640   rtx_insn *next_insn;
3641   basic_block bb;
3642   rtx_insn *insn;
3643 
3644   for (insn = get_insns ();
3645        insn
3646        && NOTE_P (insn)
3647        && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3648        insn = NEXT_INSN (insn))
3649     continue;
3650   /* No basic blocks at all?  */
3651   gcc_assert (insn);
3652 
3653   if (PREV_INSN (insn))
3654     cfg_layout_function_header =
3655 	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
3656   else
3657     cfg_layout_function_header = NULL;
3658 
3659   next_insn = get_insns ();
3660   FOR_EACH_BB_FN (bb, cfun)
3661     {
3662       rtx_insn *end;
3663 
3664       if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3665 	BB_HEADER (bb) = unlink_insn_chain (next_insn,
3666 						PREV_INSN (BB_HEAD (bb)));
3667       end = skip_insns_after_block (bb);
3668       if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3669 	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3670       next_insn = NEXT_INSN (BB_END (bb));
3671     }
3672 
3673   cfg_layout_function_footer = next_insn;
3674   if (cfg_layout_function_footer)
3675     cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3676 }
3677 
3678 namespace {
3679 
3680 const pass_data pass_data_into_cfg_layout_mode =
3681 {
3682   RTL_PASS, /* type */
3683   "into_cfglayout", /* name */
3684   OPTGROUP_NONE, /* optinfo_flags */
3685   TV_CFG, /* tv_id */
3686   0, /* properties_required */
3687   PROP_cfglayout, /* properties_provided */
3688   0, /* properties_destroyed */
3689   0, /* todo_flags_start */
3690   0, /* todo_flags_finish */
3691 };
3692 
3693 class pass_into_cfg_layout_mode : public rtl_opt_pass
3694 {
3695 public:
3696   pass_into_cfg_layout_mode (gcc::context *ctxt)
3697     : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3698   {}
3699 
3700   /* opt_pass methods: */
3701   virtual unsigned int execute (function *)
3702     {
3703       cfg_layout_initialize (0);
3704       return 0;
3705     }
3706 
3707 }; // class pass_into_cfg_layout_mode
3708 
3709 } // anon namespace
3710 
3711 rtl_opt_pass *
3712 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3713 {
3714   return new pass_into_cfg_layout_mode (ctxt);
3715 }
3716 
3717 namespace {
3718 
3719 const pass_data pass_data_outof_cfg_layout_mode =
3720 {
3721   RTL_PASS, /* type */
3722   "outof_cfglayout", /* name */
3723   OPTGROUP_NONE, /* optinfo_flags */
3724   TV_CFG, /* tv_id */
3725   0, /* properties_required */
3726   0, /* properties_provided */
3727   PROP_cfglayout, /* properties_destroyed */
3728   0, /* todo_flags_start */
3729   0, /* todo_flags_finish */
3730 };
3731 
3732 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3733 {
3734 public:
3735   pass_outof_cfg_layout_mode (gcc::context *ctxt)
3736     : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3737   {}
3738 
3739   /* opt_pass methods: */
3740   virtual unsigned int execute (function *);
3741 
3742 }; // class pass_outof_cfg_layout_mode
3743 
3744 unsigned int
3745 pass_outof_cfg_layout_mode::execute (function *fun)
3746 {
3747   basic_block bb;
3748 
3749   FOR_EACH_BB_FN (bb, fun)
3750     if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3751       bb->aux = bb->next_bb;
3752 
3753   cfg_layout_finalize ();
3754 
3755   return 0;
3756 }
3757 
3758 } // anon namespace
3759 
3760 rtl_opt_pass *
3761 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3762 {
3763   return new pass_outof_cfg_layout_mode (ctxt);
3764 }
3765 
3766 
3767 /* Link the basic blocks in the correct order, compacting the basic
3768    block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
3769    function also clears the basic block header and footer fields.
3770 
3771    This function is usually called after a pass (e.g. tracer) finishes
3772    some transformations while in cfglayout mode.  The required sequence
3773    of the basic blocks is in a linked list along the bb->aux field.
3774    This function re-links the basic block prev_bb and next_bb pointers
3775    accordingly, and it compacts and renumbers the blocks.
3776 
3777    FIXME: This currently works only for RTL, but the only RTL-specific
3778    bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
3779    to GIMPLE a long time ago, but it doesn't relink the basic block
3780    chain.  It could do that (to give better initial RTL) if this function
3781    is made IR-agnostic (and moved to cfganal.cc or cfg.cc while at it).  */
3782 
3783 void
3784 relink_block_chain (bool stay_in_cfglayout_mode)
3785 {
3786   basic_block bb, prev_bb;
3787   int index;
3788 
3789   /* Maybe dump the re-ordered sequence.  */
3790   if (dump_file)
3791     {
3792       fprintf (dump_file, "Reordered sequence:\n");
3793       for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3794 	   NUM_FIXED_BLOCKS;
3795 	   bb;
3796 	   bb = (basic_block) bb->aux, index++)
3797 	{
3798 	  fprintf (dump_file, " %i ", index);
3799 	  if (get_bb_original (bb))
3800 	    fprintf (dump_file, "duplicate of %i\n",
3801 		     get_bb_original (bb)->index);
3802 	  else if (forwarder_block_p (bb)
3803 		   && !LABEL_P (BB_HEAD (bb)))
3804 	    fprintf (dump_file, "compensation\n");
3805 	  else
3806 	    fprintf (dump_file, "bb %i\n", bb->index);
3807 	}
3808     }
3809 
3810   /* Now reorder the blocks.  */
3811   prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3812   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3813   for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3814     {
3815       bb->prev_bb = prev_bb;
3816       prev_bb->next_bb = bb;
3817     }
3818   prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3819   EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3820 
3821   /* Then, clean up the aux fields.  */
3822   FOR_ALL_BB_FN (bb, cfun)
3823     {
3824       bb->aux = NULL;
3825       if (!stay_in_cfglayout_mode)
3826 	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3827     }
3828 
3829   /* Maybe reset the original copy tables; they are not valid anymore
3830      when we renumber the basic blocks in compact_blocks.  If we are
3831      going out of cfglayout mode, don't re-allocate the tables.  */
3832   if (original_copy_tables_initialized_p ())
3833     free_original_copy_tables ();
3834   if (stay_in_cfglayout_mode)
3835     initialize_original_copy_tables ();
3836 
3837   /* Finally, put basic_block_info in the new order.  */
3838   compact_blocks ();
3839 }
3840 
3841 
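/* Editor's note: a minimal sketch of the aux-chain protocol described
   above, not part of cfgrtl.cc.  A cfglayout-mode pass records its
   desired block order through bb->aux and then lets relink_block_chain
   commit it; compare pass_outof_cfg_layout_mode::execute above, which
   builds the same chain before cfg_layout_finalize.  */
#if 0 /* Illustration only.  */
basic_block bb;
/* Keep the current order: chain every block to its present successor.  */
FOR_EACH_BB_FN (bb, cfun)
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    bb->aux = bb->next_bb;
relink_block_chain (/*stay_in_cfglayout_mode=*/true);
#endif
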
3842 /* Given a reorder chain, rearrange the code to match.  */
3843 
3844 static void
3845 fixup_reorder_chain (void)
3846 {
3847   basic_block bb;
3848   rtx_insn *insn = NULL;
3849 
3850   if (cfg_layout_function_header)
3851     {
3852       set_first_insn (cfg_layout_function_header);
3853       insn = cfg_layout_function_header;
3854       while (NEXT_INSN (insn))
3855 	insn = NEXT_INSN (insn);
3856     }
3857 
3858   /* First do the bulk reordering -- rechain the blocks without regard to
3859      the needed changes to jumps and labels.  */
3860 
3861   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3862        bb->aux)
3863     {
3864       if (BB_HEADER (bb))
3865 	{
3866 	  if (insn)
3867 	    SET_NEXT_INSN (insn) = BB_HEADER (bb);
3868 	  else
3869 	    set_first_insn (BB_HEADER (bb));
3870 	  SET_PREV_INSN (BB_HEADER (bb)) = insn;
3871 	  insn = BB_HEADER (bb);
3872 	  while (NEXT_INSN (insn))
3873 	    insn = NEXT_INSN (insn);
3874 	}
3875       if (insn)
3876 	SET_NEXT_INSN (insn) = BB_HEAD (bb);
3877       else
3878 	set_first_insn (BB_HEAD (bb));
3879       SET_PREV_INSN (BB_HEAD (bb)) = insn;
3880       insn = BB_END (bb);
3881       if (BB_FOOTER (bb))
3882 	{
3883 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3884 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3885 	  while (NEXT_INSN (insn))
3886 	    insn = NEXT_INSN (insn);
3887 	}
3888     }
3889 
3890   SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3891   if (cfg_layout_function_footer)
3892     SET_PREV_INSN (cfg_layout_function_footer) = insn;
3893 
3894   while (NEXT_INSN (insn))
3895     insn = NEXT_INSN (insn);
3896 
3897   set_last_insn (insn);
3898   if (flag_checking)
3899     verify_insn_chain ();
3900 
3901   /* Now add jumps and labels as needed to match the blocks' new
3902      outgoing edges.  */
3903 
3904   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3905        bb->aux)
3906     {
3907       edge e_fall, e_taken, e;
3908       rtx_insn *bb_end_insn;
3909       rtx ret_label = NULL_RTX;
3910       basic_block nb;
3911       edge_iterator ei;
3912 
3913       if (EDGE_COUNT (bb->succs) == 0)
3914 	continue;
3915 
3916       /* Find the old fallthru edge, and another non-EH edge for
3917 	 a taken jump.  */
3918       e_taken = e_fall = NULL;
3919 
3920       FOR_EACH_EDGE (e, ei, bb->succs)
3921 	if (e->flags & EDGE_FALLTHRU)
3922 	  e_fall = e;
3923 	else if (! (e->flags & EDGE_EH))
3924 	  e_taken = e;
3925 
3926       bb_end_insn = BB_END (bb);
3927       if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
3928 	{
3929 	  ret_label = JUMP_LABEL (bb_end_jump);
3930 	  if (any_condjump_p (bb_end_jump))
3931 	    {
3932 	      /* This might happen if the conditional jump has side
3933 		 effects and could therefore not be optimized away.
3934 		 Make the basic block end with a barrier in order
3935 		 to prevent rtl_verify_flow_info from complaining.  */
3936 	      if (!e_fall)
3937 		{
3938 		  gcc_assert (!onlyjump_p (bb_end_jump)
3939 			      || returnjump_p (bb_end_jump)
3940                               || (e_taken->flags & EDGE_CROSSING));
3941 		  emit_barrier_after (bb_end_jump);
3942 		  continue;
3943 		}
3944 
3945 	      /* If the old fallthru is still next, nothing to do.  */
3946 	      if (bb->aux == e_fall->dest
3947 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3948 		continue;
3949 
3950 	      /* The degenerate case of a conditional jump jumping to the next
3951 		 instruction can happen for jumps with side effects.  We need
3952 		 to construct a forwarder block and this will be done just
3953 		 fine by force_nonfallthru below.  */
3954 	      if (!e_taken)
3955 		;
3956 
3957 	      /* There is another special case: if *neither* block is next,
3958 		 such as happens at the very end of a function, then we'll
3959 		 need to add a new unconditional jump.  Choose the taken
3960 		 edge based on known or assumed probability.  */
3961 	      else if (bb->aux != e_taken->dest)
3962 		{
3963 		  rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
3964 
3965 		  if (note
3966 		      && profile_probability::from_reg_br_prob_note
3967 				 (XINT (note, 0)) < profile_probability::even ()
3968 		      && invert_jump (bb_end_jump,
3969 				      (e_fall->dest
3970 				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
3971 				       ? NULL_RTX
3972 				       : label_for_bb (e_fall->dest)), 0))
3973 		    {
3974 		      e_fall->flags &= ~EDGE_FALLTHRU;
3975 		      gcc_checking_assert (could_fall_through
3976 					   (e_taken->src, e_taken->dest));
3977 		      e_taken->flags |= EDGE_FALLTHRU;
3978 		      update_br_prob_note (bb);
3979 		      e = e_fall, e_fall = e_taken, e_taken = e;
3980 		    }
3981 		}
3982 
3983 	      /* If the "jumping" edge is a crossing edge, and the fall
3984 		 through edge is non-crossing, leave things as they are.  */
3985 	      else if ((e_taken->flags & EDGE_CROSSING)
3986 		       && !(e_fall->flags & EDGE_CROSSING))
3987 		continue;
3988 
3989 	      /* Otherwise we can try to invert the jump.  This will
3990 		 basically never fail; however, keep up the pretense.  */
3991 	      else if (invert_jump (bb_end_jump,
3992 				    (e_fall->dest
3993 				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
3994 				     ? NULL_RTX
3995 				     : label_for_bb (e_fall->dest)), 0))
3996 		{
3997 		  e_fall->flags &= ~EDGE_FALLTHRU;
3998 		  gcc_checking_assert (could_fall_through
3999 				       (e_taken->src, e_taken->dest));
4000 		  e_taken->flags |= EDGE_FALLTHRU;
4001 		  update_br_prob_note (bb);
4002 		  if (LABEL_NUSES (ret_label) == 0
4003 		      && single_pred_p (e_taken->dest))
4004 		    delete_insn (as_a<rtx_insn *> (ret_label));
4005 		  continue;
4006 		}
4007 	    }
4008 	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
4009 	    {
4010 	      /* If the old fallthru is still next or if
4011 		 asm goto doesn't have a fallthru (e.g. when followed by
4012 		 __builtin_unreachable ()), nothing to do.  */
4013 	      if (! e_fall
4014 		  || bb->aux == e_fall->dest
4015 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4016 		continue;
4017 
4018 	      /* Otherwise we'll have to use the fallthru fixup below.  */
4019 	    }
4020 	  else
4021 	    {
4022 	      /* Otherwise we have some return, switch or computed
4023 		 jump.  In the 99% case, there should not have been a
4024 		 fallthru edge.  */
4025 	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
4026 	      continue;
4027 	    }
4028 	}
4029       else
4030 	{
4031 	  /* No fallthru implies a noreturn function with EH edges, or
4032 	     something similarly bizarre.  In any case, we don't need to
4033 	     do anything.  */
4034 	  if (! e_fall)
4035 	    continue;
4036 
4037 	  /* If the fallthru block is still next, nothing to do.  */
4038 	  if (bb->aux == e_fall->dest)
4039 	    continue;
4040 
4041 	  /* A fallthru to the exit block.  */
4042 	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4043 	    continue;
4044 	}
4045 
4046       /* We got here if we need to add a new jump insn.
4047 	 Note force_nonfallthru can delete E_FALL and thus we have to
4048 	 save E_FALL->src prior to the call to force_nonfallthru.  */
4049       nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
4050       if (nb)
4051 	{
4052 	  nb->aux = bb->aux;
4053 	  bb->aux = nb;
4054 	  /* Don't process this new block.  */
4055 	  bb = nb;
4056 	}
4057     }
4058 
4059   relink_block_chain (/*stay_in_cfglayout_mode=*/false);
4060 
4061   /* Annoying special case - jump around dead jumptables left in the code.  */
4062   FOR_EACH_BB_FN (bb, cfun)
4063     {
4064       edge e = find_fallthru_edge (bb->succs);
4065 
4066       if (e && !can_fallthru (e->src, e->dest))
4067 	force_nonfallthru (e);
4068     }
4069 
4070   /* When not optimizing, ensure that each edge's goto_locus is reflected
4071      by some instruction with that locus in the RTL.  */
4072   if (!optimize && !DECL_IGNORED_P (current_function_decl))
4073     FOR_EACH_BB_FN (bb, cfun)
4074       {
4075         edge e;
4076         edge_iterator ei;
4077 
4078         FOR_EACH_EDGE (e, ei, bb->succs)
4079 	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
4080 	      && !(e->flags & EDGE_ABNORMAL))
4081 	    {
4082 	      edge e2;
4083 	      edge_iterator ei2;
4084 	      basic_block dest, nb;
4085 	      rtx_insn *end;
4086 
4087 	      insn = BB_END (e->src);
4088 	      end = PREV_INSN (BB_HEAD (e->src));
4089 	      while (insn != end
4090 		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
4091 		insn = PREV_INSN (insn);
4092 	      if (insn != end
4093 		  && loc_equal (INSN_LOCATION (insn), e->goto_locus))
4094 		continue;
4095 	      if (simplejump_p (BB_END (e->src))
4096 		  && !INSN_HAS_LOCATION (BB_END (e->src)))
4097 		{
4098 		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
4099 		  continue;
4100 		}
4101 	      dest = e->dest;
4102 	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4103 		{
4104 		  /* Non-fallthru edges to the exit block cannot be split.  */
4105 		  if (!(e->flags & EDGE_FALLTHRU))
4106 		    continue;
4107 		}
4108 	      else
4109 		{
4110 		  insn = BB_HEAD (dest);
4111 		  end = NEXT_INSN (BB_END (dest));
4112 		  while (insn != end && !NONDEBUG_INSN_P (insn))
4113 		    insn = NEXT_INSN (insn);
4114 		  if (insn != end && INSN_HAS_LOCATION (insn)
4115 		      && loc_equal (INSN_LOCATION (insn), e->goto_locus))
4116 		    continue;
4117 		}
4118 	      nb = split_edge (e);
4119 	      if (!INSN_P (BB_END (nb)))
4120 		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
4121 							 nb);
4122 	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;
4123 
4124 	      /* If there are other incoming edges to the destination block
4125 		 with the same goto locus, redirect them to the new block as
4126 		 well, this can prevent other such blocks from being created
4127 		 in subsequent iterations of the loop.  */
4128 	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
4129 		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
4130 		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
4131 		    && e->goto_locus == e2->goto_locus)
4132 		  redirect_edge_and_branch (e2, nb);
4133 		else
4134 		  ei_next (&ei2);
4135 	    }
4136       }
4137 }
4138 
4139 /* Perform sanity checks on the insn chain.
4140    1. Check that next/prev pointers are consistent in both the forward and
4141       reverse direction.
4142    2. Count insns in chain, going both directions, and check if equal.
4143    3. Check that get_last_insn () returns the actual end of chain.  */
4144 
4145 DEBUG_FUNCTION void
4146 verify_insn_chain (void)
4147 {
4148   rtx_insn *x, *prevx, *nextx;
4149   int insn_cnt1, insn_cnt2;
4150 
4151   for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4152        x != 0;
4153        prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4154     gcc_assert (PREV_INSN (x) == prevx);
4155 
4156   gcc_assert (prevx == get_last_insn ());
4157 
4158   for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4159        x != 0;
4160        nextx = x, insn_cnt2++, x = PREV_INSN (x))
4161     gcc_assert (NEXT_INSN (x) == nextx);
4162 
4163   gcc_assert (insn_cnt1 == insn_cnt2);
4164 }
4165 
4166 /* If we have assembler epilogues, the block falling through to exit must
4167    be the last one in the reordered chain when we reach final.  Ensure
4168    that this condition is met.  */
4169 static void
4170 fixup_fallthru_exit_predecessor (void)
4171 {
4172   edge e;
4173   basic_block bb = NULL;
4174 
4175   /* This transformation is not valid before reload, because we might
4176      separate a call from the instruction that copies the return
4177      value.  */
4178   gcc_assert (reload_completed);
4179 
4180   e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4181   if (e)
4182     bb = e->src;
4183 
4184   if (bb && bb->aux)
4185     {
4186       basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4187 
4188       /* If the very first block is the one with the fall-through exit
4189 	 edge, we have to split that block.  */
4190       if (c == bb)
4191 	{
4192 	  bb = split_block_after_labels (bb)->dest;
4193 	  bb->aux = c->aux;
4194 	  c->aux = bb;
4195 	  BB_FOOTER (bb) = BB_FOOTER (c);
4196 	  BB_FOOTER (c) = NULL;
4197 	}
4198 
4199       while (c->aux != bb)
4200 	c = (basic_block) c->aux;
4201 
4202       c->aux = bb->aux;
4203       while (c->aux)
4204 	c = (basic_block) c->aux;
4205 
4206       c->aux = bb;
4207       bb->aux = NULL;
4208     }
4209 }
4210 
4211 /* In case the exit block has more than one fallthru predecessor, force it
4212    to have only one.  */
4213 
4214 static void
4215 force_one_exit_fallthru (void)
4216 {
4217   edge e, predecessor = NULL;
4218   bool more = false;
4219   edge_iterator ei;
4220   basic_block forwarder, bb;
4221 
4222   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4223     if (e->flags & EDGE_FALLTHRU)
4224       {
4225 	if (predecessor == NULL)
4226 	  predecessor = e;
4227 	else
4228 	  {
4229 	    more = true;
4230 	    break;
4231 	  }
4232       }
4233 
4234   if (!more)
4235     return;
4236 
4237   /* Exit has several fallthru predecessors.  Create a forwarder block for
4238      them.  */
4239   forwarder = split_edge (predecessor);
4240   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4241        (e = ei_safe_edge (ei)); )
4242     {
4243       if (e->src == forwarder
4244 	  || !(e->flags & EDGE_FALLTHRU))
4245 	ei_next (&ei);
4246       else
4247 	redirect_edge_and_branch_force (e, forwarder);
4248     }
4249 
4250   /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4251      exit block.  */
4252   FOR_EACH_BB_FN (bb, cfun)
4253     {
4254       if (bb->aux == NULL && bb != forwarder)
4255 	{
4256 	  bb->aux = forwarder;
4257 	  break;
4258 	}
4259     }
4260 }
4261 
4262 /* Return true in case it is possible to duplicate the basic block BB.  */
4263 
4264 static bool
4265 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4266 {
4267   /* Do not attempt to duplicate tablejumps, as we need to unshare
4268      the dispatch table.  This is difficult to do, as the instructions
4269      computing the jump destination may be hoisted outside the basic block.  */
4270   if (tablejump_p (BB_END (bb), NULL, NULL))
4271     return false;
4272 
4273   /* Do not duplicate blocks containing insns that can't be copied.  */
4274   if (targetm.cannot_copy_insn_p)
4275     {
4276       rtx_insn *insn = BB_HEAD (bb);
4277       while (1)
4278 	{
4279 	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4280 	    return false;
4281 	  if (insn == BB_END (bb))
4282 	    break;
4283 	  insn = NEXT_INSN (insn);
4284 	}
4285     }
4286 
4287   return true;
4288 }
4289 
4290 rtx_insn *
4291 duplicate_insn_chain (rtx_insn *from, rtx_insn *to,
4292 		      class loop *loop, copy_bb_data *id)
4293 {
4294   rtx_insn *insn, *next, *copy;
4295   rtx_note *last;
4296 
4297   /* Avoid updating the boundaries of the previous basic block.  The
4298      note will get removed from the insn stream in fixup.  */
4299   last = emit_note (NOTE_INSN_DELETED);
4300 
4301   /* Create the copy at the end of the insn chain.  The chain will
4302      be reordered later.  */
4303   for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4304     {
4305       switch (GET_CODE (insn))
4306 	{
4307 	case DEBUG_INSN:
4308 	  /* Don't duplicate label debug insns.  */
4309 	  if (DEBUG_BIND_INSN_P (insn)
4310 	      && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4311 	    break;
4312 	  /* FALLTHRU */
4313 	case INSN:
4314 	case CALL_INSN:
4315 	case JUMP_INSN:
4316 	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
4317 	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4318 	      && ANY_RETURN_P (JUMP_LABEL (insn)))
4319 	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
4320           maybe_copy_prologue_epilogue_insn (insn, copy);
4321 	  /* If requested, remap dependence info of cliques brought in
4322 	     via inlining.  */
4323 	  if (id)
4324 	    {
4325 	      subrtx_iterator::array_type array;
4326 	      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4327 		if (MEM_P (*iter) && MEM_EXPR (*iter))
4328 		  {
4329 		    tree op = MEM_EXPR (*iter);
4330 		    if (TREE_CODE (op) == WITH_SIZE_EXPR)
4331 		      op = TREE_OPERAND (op, 0);
4332 		    while (handled_component_p (op))
4333 		      op = TREE_OPERAND (op, 0);
4334 		    if ((TREE_CODE (op) == MEM_REF
4335 			 || TREE_CODE (op) == TARGET_MEM_REF)
4336 			&& MR_DEPENDENCE_CLIQUE (op) > 1
4337 			&& (!loop
4338 			    || (MR_DEPENDENCE_CLIQUE (op)
4339 				!= loop->owned_clique)))
4340 		      {
4341 			if (!id->dependence_map)
4342 			  id->dependence_map = new hash_map<dependence_hash,
4343 			      unsigned short>;
4344 			bool existed;
4345 			unsigned short &newc = id->dependence_map->get_or_insert
4346 					 (MR_DEPENDENCE_CLIQUE (op), &existed);
4347 			if (!existed)
4348 			  {
4349 			    gcc_assert
4350 			      (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
4351 			    newc = ++cfun->last_clique;
4352 			  }
4353 			/* We cannot adjust MR_DEPENDENCE_CLIQUE in-place
4354 			   since MEM_EXPR is shared, so make a copy and
4355 			   walk down to the subtree again.  */
4356 			tree new_expr = unshare_expr (MEM_EXPR (*iter));
4357 			if (TREE_CODE (new_expr) == WITH_SIZE_EXPR)
4358 			  new_expr = TREE_OPERAND (new_expr, 0);
4359 			while (handled_component_p (new_expr))
4360 			  new_expr = TREE_OPERAND (new_expr, 0);
4361 			MR_DEPENDENCE_CLIQUE (new_expr) = newc;
4362 			set_mem_expr (const_cast <rtx> (*iter), new_expr);
4363 		      }
4364 		  }
4365 	    }
4366 	  break;
4367 
4368 	case JUMP_TABLE_DATA:
4369 	  /* Avoid copying dispatch tables.  We never duplicate
4370 	     tablejumps, so this can hit only in case the table got
4371 	     moved far from the original jump.
4372 	     Avoid copying the following barrier as well, if any
4373 	     (and debug insns in between).  */
4374 	  for (next = NEXT_INSN (insn);
4375 	       next != NEXT_INSN (to);
4376 	       next = NEXT_INSN (next))
4377 	    if (!DEBUG_INSN_P (next))
4378 	      break;
4379 	  if (next != NEXT_INSN (to) && BARRIER_P (next))
4380 	    insn = next;
4381 	  break;
4382 
4383 	case CODE_LABEL:
4384 	  break;
4385 
4386 	case BARRIER:
4387 	  emit_barrier ();
4388 	  break;
4389 
4390 	case NOTE:
4391 	  switch (NOTE_KIND (insn))
4392 	    {
4393 	      /* In case the prologue is empty and the function contains a
4394 		 label in its first BB, we may want to copy the block.  */
4395 	    case NOTE_INSN_PROLOGUE_END:
4396 
4397 	    case NOTE_INSN_DELETED:
4398 	    case NOTE_INSN_DELETED_LABEL:
4399 	    case NOTE_INSN_DELETED_DEBUG_LABEL:
4400 	      /* No problem to strip these.  */
4401 	    case NOTE_INSN_FUNCTION_BEG:
4402 	      /* There is always just a single entry to the function.  */
4403 	    case NOTE_INSN_BASIC_BLOCK:
4404               /* We should only switch text sections once.  */
4405 	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4406 	      break;
4407 
4408 	    case NOTE_INSN_EPILOGUE_BEG:
4409 	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
4410 	      emit_note_copy (as_a <rtx_note *> (insn));
4411 	      break;
4412 
4413 	    default:
4414 	      /* All other notes should have already been eliminated.  */
4415 	      gcc_unreachable ();
4416 	    }
4417 	  break;
4418 	default:
4419 	  gcc_unreachable ();
4420 	}
4421     }
4422   insn = NEXT_INSN (last);
4423   delete_insn (last);
4424   return insn;
4425 }
4426 
4427 /* Create a duplicate of the basic block BB.  */
4428 
4429 static basic_block
4430 cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *id)
4431 {
4432   rtx_insn *insn;
4433   basic_block new_bb;
4434 
4435   class loop *loop = (id && current_loops) ? bb->loop_father : NULL;
4436 
4437   insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb), loop, id);
4438   new_bb = create_basic_block (insn,
4439 			       insn ? get_last_insn () : NULL,
4440 			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4441 
4442   BB_COPY_PARTITION (new_bb, bb);
4443   if (BB_HEADER (bb))
4444     {
4445       insn = BB_HEADER (bb);
4446       while (NEXT_INSN (insn))
4447 	insn = NEXT_INSN (insn);
4448       insn = duplicate_insn_chain (BB_HEADER (bb), insn, loop, id);
4449       if (insn)
4450 	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4451     }
4452 
4453   if (BB_FOOTER (bb))
4454     {
4455       insn = BB_FOOTER (bb);
4456       while (NEXT_INSN (insn))
4457 	insn = NEXT_INSN (insn);
4458       insn = duplicate_insn_chain (BB_FOOTER (bb), insn, loop, id);
4459       if (insn)
4460 	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4461     }
4462 
4463   return new_bb;
4464 }
4465 
4466 
4467 /* Main entry point to this module - initialize the data structures for
4468    CFG layout changes.  It keeps LOOPS up-to-date if not null.
4469 
4470    FLAGS is a set of additional flags to pass to cleanup_cfg().  */
4471 
4472 void
4473 cfg_layout_initialize (int flags)
4474 {
4475   rtx_insn_list *x;
4476   basic_block bb;
4477 
4478   /* Once bb partitioning is complete, cfg layout mode should not be
4479      re-entered.  Entering cfg layout mode may require fixups.  For
4480      example, edge forwarding performed when optimizing the cfg layout
4481      could require moving a block from the hot to the cold section,
4482      which would create an illegal partitioning unless some manual
4483      fixup was performed.  */
4484   gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
4485 
4486   initialize_original_copy_tables ();
4487 
4488   cfg_layout_rtl_register_cfg_hooks ();
4489 
4490   record_effective_endpoints ();
4491 
4492   /* Make sure that the targets of non local gotos are marked.  */
4493   for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4494     {
4495       bb = BLOCK_FOR_INSN (x->insn ());
4496       bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4497     }
4498 
4499   cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4500 }
4501 
4502 /* Splits superblocks.  */
4503 void
4504 break_superblocks (void)
4505 {
4506   bool need = false;
4507   basic_block bb;
4508 
4509   auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
4510   bitmap_clear (superblocks);
4511 
4512   FOR_EACH_BB_FN (bb, cfun)
4513     if (bb->flags & BB_SUPERBLOCK)
4514       {
4515 	bb->flags &= ~BB_SUPERBLOCK;
4516 	bitmap_set_bit (superblocks, bb->index);
4517 	need = true;
4518       }
4519 
4520   if (need)
4521     {
4522       rebuild_jump_labels (get_insns ());
4523       find_many_sub_basic_blocks (superblocks);
4524     }
4525 }
4526 
4527 /* Finalize the changes: reorder insn list according to the sequence specified
4528    by aux pointers, enter compensation code, rebuild scope forest.  */
4529 
4530 void
4531 cfg_layout_finalize (void)
4532 {
4533   free_dominance_info (CDI_DOMINATORS);
4534   force_one_exit_fallthru ();
4535   rtl_register_cfg_hooks ();
4536   if (reload_completed && !targetm.have_epilogue ())
4537     fixup_fallthru_exit_predecessor ();
4538   fixup_reorder_chain ();
4539 
4540   rebuild_jump_labels (get_insns ());
4541   delete_dead_jumptables ();
4542 
4543   if (flag_checking)
4544     verify_insn_chain ();
4545   checking_verify_flow_info ();
4546 }
4547 
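/* Editor's note: a minimal sketch of the cfglayout-mode lifecycle, not
   part of cfgrtl.cc; it mirrors what the into_cfglayout and
   outof_cfglayout passes above do around the passes running between
   them.  */
#if 0 /* Illustration only.  */
cfg_layout_initialize (0);
/* ... reorder blocks and redirect edges through the cfg_layout_*
   hooks, recording the desired block order in bb->aux ...  */
cfg_layout_finalize ();
#endif
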
4548 
4549 /* Same as split_block but update cfg_layout structures.  */
4550 
4551 static basic_block
4552 cfg_layout_split_block (basic_block bb, void *insnp)
4553 {
4554   rtx insn = (rtx) insnp;
4555   basic_block new_bb = rtl_split_block (bb, insn);
4556 
4557   BB_FOOTER (new_bb) = BB_FOOTER (bb);
4558   BB_FOOTER (bb) = NULL;
4559 
4560   return new_bb;
4561 }
4562 
4563 /* Redirect edge E to DEST.  */
4564 static edge
4565 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4566 {
4567   basic_block src = e->src;
4568   edge ret;
4569 
4570   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4571     return NULL;
4572 
4573   if (e->dest == dest)
4574     return e;
4575 
4576   if (e->flags & EDGE_CROSSING
4577       && BB_PARTITION (e->src) == BB_PARTITION (dest)
4578       && simplejump_p (BB_END (src)))
4579     {
4580       if (dump_file)
4581 	fprintf (dump_file,
4582 	  	 "Removing crossing jump while redirecting edge form %i to %i\n",
4583 		 e->src->index, dest->index);
4584       delete_insn (BB_END (src));
4585       remove_barriers_from_footer (src);
4586       e->flags |= EDGE_FALLTHRU;
4587     }
4588 
4589   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4590       && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4591     {
4592       df_set_bb_dirty (src);
4593       return ret;
4594     }
4595 
4596   if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4597       && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4598     {
4599       if (dump_file)
4600 	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4601 		 e->src->index, dest->index);
4602 
4603       df_set_bb_dirty (e->src);
4604       redirect_edge_succ (e, dest);
4605       return e;
4606     }
4607 
4608   /* Redirect_edge_and_branch may decide to turn the branch into a fallthru
4609      edge in case the basic blocks appear to be in sequence.  Avoid this
4610      transformation.  */
4611 
4612   if (e->flags & EDGE_FALLTHRU)
4613     {
4614       /* Redirect any branch edges unified with the fallthru one.  */
4615       if (JUMP_P (BB_END (src))
4616 	  && label_is_jump_target_p (BB_HEAD (e->dest),
4617 				     BB_END (src)))
4618 	{
4619 	  edge redirected;
4620 
4621 	  if (dump_file)
4622 	    fprintf (dump_file, "Fallthru edge unified with branch "
4623 		     "%i->%i redirected to %i\n",
4624 		     e->src->index, e->dest->index, dest->index);
4625 	  e->flags &= ~EDGE_FALLTHRU;
4626 	  redirected = redirect_branch_edge (e, dest);
4627 	  gcc_assert (redirected);
4628 	  redirected->flags |= EDGE_FALLTHRU;
4629 	  df_set_bb_dirty (redirected->src);
4630 	  return redirected;
4631 	}
4632       /* In case we are redirecting the fallthru edge to the branch edge
4633 	 of a conditional jump, remove the jump.  */
4634       if (EDGE_COUNT (src->succs) == 2)
4635 	{
4636 	  /* Find the edge that is different from E.  */
4637 	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4638 
4639 	  if (s->dest == dest
4640 	      && any_condjump_p (BB_END (src))
4641 	      && onlyjump_p (BB_END (src)))
4642 	    delete_insn (BB_END (src));
4643 	}
4644       if (dump_file)
4645 	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4646 		 e->src->index, e->dest->index, dest->index);
4647       ret = redirect_edge_succ_nodup (e, dest);
4648     }
4649   else
4650     ret = redirect_branch_edge (e, dest);
4651 
4652   if (!ret)
4653     return NULL;
4654 
4655   fixup_partition_crossing (ret);
4656   /* We don't want simplejumps in the insn stream during cfglayout.  */
4657   gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
4658 
4659   df_set_bb_dirty (src);
4660   return ret;
4661 }
4662 
4663 /* Simple wrapper, as we can always redirect fallthru edges.  */
4664 static basic_block
4665 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4666 {
4667   edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4668 
4669   gcc_assert (redirected);
4670   return NULL;
4671 }
4672 
4673 /* Same as delete_basic_block but update cfg_layout structures.  */
4674 
4675 static void
4676 cfg_layout_delete_block (basic_block bb)
4677 {
4678   rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remnants;
4679   rtx_insn **to;
4680 
4681   if (BB_HEADER (bb))
4682     {
4683       next = BB_HEAD (bb);
4684       if (prev)
4685 	SET_NEXT_INSN (prev) = BB_HEADER (bb);
4686       else
4687 	set_first_insn (BB_HEADER (bb));
4688       SET_PREV_INSN (BB_HEADER (bb)) = prev;
4689       insn = BB_HEADER (bb);
4690       while (NEXT_INSN (insn))
4691 	insn = NEXT_INSN (insn);
4692       SET_NEXT_INSN (insn) = next;
4693       SET_PREV_INSN (next) = insn;
4694     }
4695   next = NEXT_INSN (BB_END (bb));
4696   if (BB_FOOTER (bb))
4697     {
4698       insn = BB_FOOTER (bb);
4699       while (insn)
4700 	{
4701 	  if (BARRIER_P (insn))
4702 	    {
4703 	      if (PREV_INSN (insn))
4704 		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4705 	      else
4706 		BB_FOOTER (bb) = NEXT_INSN (insn);
4707 	      if (NEXT_INSN (insn))
4708 		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4709 	    }
4710 	  if (LABEL_P (insn))
4711 	    break;
4712 	  insn = NEXT_INSN (insn);
4713 	}
4714       if (BB_FOOTER (bb))
4715 	{
4716 	  insn = BB_END (bb);
4717 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4718 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4719 	  while (NEXT_INSN (insn))
4720 	    insn = NEXT_INSN (insn);
4721 	  SET_NEXT_INSN (insn) = next;
4722 	  if (next)
4723 	    SET_PREV_INSN (next) = insn;
4724 	  else
4725 	    set_last_insn (insn);
4726 	}
4727     }
4728   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4729     to = &BB_HEADER (bb->next_bb);
4730   else
4731     to = &cfg_layout_function_footer;
4732 
4733   rtl_delete_block (bb);
4734 
4735   if (prev)
4736     prev = NEXT_INSN (prev);
4737   else
4738     prev = get_insns ();
4739   if (next)
4740     next = PREV_INSN (next);
4741   else
4742     next = get_last_insn ();
4743 
4744   if (next && NEXT_INSN (next) != prev)
4745     {
4746       remnants = unlink_insn_chain (prev, next);
4747       insn = remnants;
4748       while (NEXT_INSN (insn))
4749 	insn = NEXT_INSN (insn);
4750       SET_NEXT_INSN (insn) = *to;
4751       if (*to)
4752 	SET_PREV_INSN (*to) = insn;
4753       *to = remnants;
4754     }
4755 }
4756 
4757 /* Return true when blocks A and B can be safely merged.  */
4758 
4759 static bool
4760 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4761 {
4762   /* If we are partitioning hot/cold basic blocks, we don't want to
4763      mess up unconditional or indirect jumps that cross between hot
4764      and cold sections.
4765 
4766      Basic block partitioning may result in some jumps that appear to
4767      be optimizable (or blocks that appear to be mergeable), but which really
4768      must be left untouched (they are required to make it safely across
4769      partition boundaries).  See  the comments at the top of
4770      bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */
4771 
4772   if (BB_PARTITION (a) != BB_PARTITION (b))
4773     return false;
4774 
4775   /* Protect the loop latches.  */
4776   if (current_loops && b->loop_father->latch == b)
4777     return false;
4778 
4779   /* If we would end up moving B's instructions, make sure it doesn't fall
4780      through into the exit block, since we cannot recover from a fallthrough
4781      edge into the exit block occurring in the middle of a function.  */
4782   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4783     {
4784       edge e = find_fallthru_edge (b->succs);
4785       if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4786 	return false;
4787     }
4788 
4789   /* There must be exactly one edge in between the blocks.  */
4790   return (single_succ_p (a)
4791 	  && single_succ (a) == b
4792 	  && single_pred_p (b) == 1
4793 	  && a != b
4794 	  /* Must be a simple edge.  */
4795 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4796 	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4797 	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4798 	  /* If the jump insn has side effects, we can't kill the edge.
4799 	     When not optimizing, try_redirect_by_replacing_jump will
4800 	     not allow us to redirect an edge by replacing a table jump.  */
4801 	  && (!JUMP_P (BB_END (a))
4802 	      || ((!optimize || reload_completed)
4803 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4804 }
4805 
4806 /* Merge block A and B.  The blocks must be mergeable.  */
4807 
4808 static void
4809 cfg_layout_merge_blocks (basic_block a, basic_block b)
4810 {
4811   /* If B is a forwarder block whose outgoing edge has no location, we'll
4812      propagate the locus of the edge between A and B onto it.  */
4813   const bool forward_edge_locus
4814     = (b->flags & BB_FORWARDER_BLOCK) != 0
4815       && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
4816   rtx_insn *insn;
4817 
4818   gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4819 
4820   if (dump_file)
4821     fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4822 			 a->index);
4823 
4824   /* If there was a CODE_LABEL beginning B, delete it.  */
4825   if (LABEL_P (BB_HEAD (b)))
4826     {
4827       delete_insn (BB_HEAD (b));
4828     }
4829 
4830   /* We should have a fallthru edge in A, or we can do a dummy redirection
4831      to get it cleaned up.  */
4832   if (JUMP_P (BB_END (a)))
4833     try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4834   gcc_assert (!JUMP_P (BB_END (a)));
4835 
4836   /* If not optimizing, preserve the locus of the single edge between
4837      blocks A and B if necessary by emitting a nop.  */
4838   if (!optimize
4839       && !forward_edge_locus
4840       && !DECL_IGNORED_P (current_function_decl))
4841     emit_nop_for_unique_locus_between (a, b);
4842 
4843   /* Move things from b->footer after a->footer.  */
4844   if (BB_FOOTER (b))
4845     {
4846       if (!BB_FOOTER (a))
4847 	BB_FOOTER (a) = BB_FOOTER (b);
4848       else
4849 	{
4850 	  rtx_insn *last = BB_FOOTER (a);
4851 
4852 	  while (NEXT_INSN (last))
4853 	    last = NEXT_INSN (last);
4854 	  SET_NEXT_INSN (last) = BB_FOOTER (b);
4855 	  SET_PREV_INSN (BB_FOOTER (b)) = last;
4856 	}
4857       BB_FOOTER (b) = NULL;
4858     }
4859 
4860   /* Move things from b->header before a->footer.
4861      Note that this may include dead tablejump data, but we don't clean
4862      those up until we go out of cfglayout mode.  */
4863    if (BB_HEADER (b))
4864      {
4865       if (! BB_FOOTER (a))
4866 	BB_FOOTER (a) = BB_HEADER (b);
4867       else
4868 	{
4869 	  rtx_insn *last = BB_HEADER (b);
4870 
4871 	  while (NEXT_INSN (last))
4872 	    last = NEXT_INSN (last);
4873 	  SET_NEXT_INSN (last) = BB_FOOTER (a);
4874 	  SET_PREV_INSN (BB_FOOTER (a)) = last;
4875 	  BB_FOOTER (a) = BB_HEADER (b);
4876 	}
4877       BB_HEADER (b) = NULL;
4878     }
4879 
4880   /* In case the basic blocks are not adjacent, move them around.  */
4881   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4882     {
4883       insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4884 
4885       emit_insn_after_noloc (insn, BB_END (a), a);
4886     }
4887   /* Otherwise just re-associate the instructions.  */
4888   else
4889     {
4890       insn = BB_HEAD (b);
4891       BB_END (a) = BB_END (b);
4892     }
4893 
4894   /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4895      We need to call it explicitly.  */
4896   update_bb_for_insn_chain (insn, BB_END (b), a);
4897 
4898   /* Skip possible DELETED_LABEL insn.  */
4899   if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4900     insn = NEXT_INSN (insn);
4901   gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4902   BB_HEAD (b) = BB_END (b) = NULL;
4903   delete_insn (insn);
4904 
4905   df_bb_delete (b->index);
4906 
4907   if (forward_edge_locus)
4908     EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4909 
4910   if (dump_file)
4911     fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4912 }
4913 
4914 /* Split edge E.  */
4915 
4916 static basic_block
4917 cfg_layout_split_edge (edge e)
4918 {
4919   basic_block new_bb =
4920     create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4921 			? NEXT_INSN (BB_END (e->src)) : get_insns (),
4922 			NULL_RTX, e->src);
4923 
4924   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4925     BB_COPY_PARTITION (new_bb, e->src);
4926   else
4927     BB_COPY_PARTITION (new_bb, e->dest);
4928   make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4929   redirect_edge_and_branch_force (e, new_bb);
4930 
4931   return new_bb;
4932 }
4933 
4934 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */
4935 
4936 static void
4937 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4938 {
4939 }
4940 
4941 /* Return true if BB contains only labels or non-executable
4942    instructions.  */
4943 
4944 static bool
4945 rtl_block_empty_p (basic_block bb)
4946 {
4947   rtx_insn *insn;
4948 
4949   if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4950       || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4951     return true;
4952 
4953   FOR_BB_INSNS (bb, insn)
4954     if (NONDEBUG_INSN_P (insn)
4955 	&& (!any_uncondjump_p (insn) || !onlyjump_p (insn)))
4956       return false;
4957 
4958   return true;
4959 }
4960 
4961 /* Split a basic block if it ends with a conditional branch and if
4962    the other part of the block is not empty.  */
4963 
4964 static basic_block
4965 rtl_split_block_before_cond_jump (basic_block bb)
4966 {
4967   rtx_insn *insn;
4968   rtx_insn *split_point = NULL;
4969   rtx_insn *last = NULL;
4970   bool found_code = false;
4971 
4972   FOR_BB_INSNS (bb, insn)
4973     {
4974       if (any_condjump_p (insn))
4975 	split_point = last;
4976       else if (NONDEBUG_INSN_P (insn))
4977 	found_code = true;
4978       last = insn;
4979     }
4980 
4981   /* Split only if we found both a conditional jump and other real code.  */
4982   if (found_code && split_point)
4983     return split_block (bb, split_point)->dest;
4984   else
4985     return NULL;
4986 }
4987 
4988 /* Return true if BB ends with a call, possibly followed by some
4989    instructions that must stay with the call, false otherwise.  */
4990 
4991 static bool
4992 rtl_block_ends_with_call_p (basic_block bb)
4993 {
4994   rtx_insn *insn = BB_END (bb);
4995 
4996   while (!CALL_P (insn)
4997 	 && insn != BB_HEAD (bb)
4998 	 && (keep_with_call_p (insn)
4999 	     || NOTE_P (insn)
5000 	     || DEBUG_INSN_P (insn)))
5001     insn = PREV_INSN (insn);
5002   return (CALL_P (insn));
5003 }
5004 
5005 /* Return true if BB ends with a conditional branch, false otherwise.  */
5006 
5007 static bool
5008 rtl_block_ends_with_condjump_p (const_basic_block bb)
5009 {
5010   return any_condjump_p (BB_END (bb));
5011 }
5012 
5013 /* Return true if we need to add a fake edge to the exit.
5014    Helper function for rtl_flow_call_edges_add.  */
5015 
5016 static bool
5017 need_fake_edge_p (const rtx_insn *insn)
5018 {
5019   if (!INSN_P (insn))
5020     return false;
5021 
5022   if ((CALL_P (insn)
5023        && !SIBLING_CALL_P (insn)
5024        && !find_reg_note (insn, REG_NORETURN, NULL)
5025        && !(RTL_CONST_OR_PURE_CALL_P (insn))))
5026     return true;
5027 
5028   return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5029 	   && MEM_VOLATILE_P (PATTERN (insn)))
5030 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
5031 	      && asm_noperands (insn) != -1
5032 	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
5033 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
5034 }
5035 
5036 /* Add fake edges to the function exit for any non-constant and non-noreturn
5037    calls, or volatile inline assembly, in the bitmap of blocks specified by
5038    BLOCKS or in the whole CFG if BLOCKS is zero.  Return the number of blocks
5039    that were split.
5040 
5041    The goal is to expose cases in which entering a basic block does not imply
5042    that all subsequent instructions must be executed.  */
5043 
5044 static int
5045 rtl_flow_call_edges_add (sbitmap blocks)
5046 {
5047   int i;
5048   int blocks_split = 0;
5049   int last_bb = last_basic_block_for_fn (cfun);
5050   bool check_last_block = false;
5051 
5052   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
5053     return 0;
5054 
5055   if (! blocks)
5056     check_last_block = true;
5057   else
5058     check_last_block = bitmap_bit_p (blocks,
5059 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
5060 
5061   /* In the last basic block, before epilogue generation, there will be
5062      a fallthru edge to EXIT.  Special care is required if the last insn
5063      of the last basic block is a call because make_edge folds duplicate
5064      edges, which would result in the fallthru edge also being marked
5065      fake, which would result in the fallthru edge being removed by
5066      remove_fake_edges, which would result in an invalid CFG.
5067 
5068      Moreover, we can't elide the outgoing fake edge, since the block
5069      profiler needs to take this into account in order to solve the minimal
5070      spanning tree in the case that the call doesn't return.
5071 
5072      Handle this by adding a dummy instruction in a new last basic block.  */
5073   if (check_last_block)
5074     {
5075       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5076       rtx_insn *insn = BB_END (bb);
5077 
5078       /* Back up past insns that must be kept in the same block as a call.  */
5079       while (insn != BB_HEAD (bb)
5080 	     && keep_with_call_p (insn))
5081 	insn = PREV_INSN (insn);
5082 
5083       if (need_fake_edge_p (insn))
5084 	{
5085 	  edge e;
5086 
5087 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
5088 	  if (e)
5089 	    {
5090 	      insert_insn_on_edge (gen_use (const0_rtx), e);
5091 	      commit_edge_insertions ();
5092 	    }
5093 	}
5094     }
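
  /* Illustration (not part of the original sources): the dummy insn
     emitted above is a (use (const_int 0)).  Committing it on the
     fallthru edge to EXIT splits that edge, so the fake edge added
     later originates from a different block than the fallthru edge
     and make_edge cannot fold the two together.  */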

  /* Now add fake edges to the function exit for any non-constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in the CFG already.  Calling make_edge in such a case
		 would cause us to mark that edge as fake and remove it
		 later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	      ne->probability = profile_probability::guessed_never ();
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
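
/* Illustration (not part of the original sources): given a block

     B:  insn 1
         call foo ()
         insn 2

   the loop above splits B after the call (keeping with it any insns
   that must stay with the call), then adds an EDGE_FAKE edge from B
   to EXIT, so consumers no longer assume that reaching B implies
   executing insn 2.  */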

/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target, and SECOND_HEAD would be the
   fall-thru target; there is no need to handle the latter here
   because the loop versioning code handles it.  SECOND_HEAD remains a
   parameter because the tree-level implementation needs it for the
   condition, and both must share the same signature since this is a
   hook.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx_code_label *label;
  rtx_insn *seq, *jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  machine_mode mode;

  label = block_label (first_head);
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
			   profile_probability::uninitialized ());
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Emit the new condition at the end of COND_BB.  */
  emit_insn_after (seq, BB_END (cond_bb));
}
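
/* Illustration (not part of the original sources): if COMP_RTX is
   (gt (reg r0) (reg r1)), the emitted sequence is roughly

     (set (cc) (compare r0 r1))
     (jump_insn ... (if_then_else (gt ...) (label of FIRST_HEAD) (pc)))

   i.e. a compare followed by a conditional jump to FIRST_HEAD; the
   exact shape depends on the target.  */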


/* Given a block B with a conditional branch at its end, store the
   branch edge and the fall-thru edge in BRANCH_EDGE and FALLTHRU_EDGE
   respectively.  */
static void
rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
			   edge *fallthru_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_FALLTHRU)
    {
      *fallthru_edge = e;
      *branch_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *branch_edge = e;
      *fallthru_edge = EDGE_SUCC (b, 1);
    }
}
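
/* Illustration (not part of the original sources): such a block has
   exactly two successors, and exactly one of them carries
   EDGE_FALLTHRU; the other is the branch edge, so checking successor
   0 is enough to classify both.  */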

void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
}

static bool
rtl_bb_info_initialized_p (basic_block bb)
{
  return bb->il.x.rtl;
}

/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
rtl_can_remove_branch_p (const_edge e)
{
  const_basic_block src = e->src;
  const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
  const rtx_insn *insn = BB_END (src);
  rtx set;

  /* The conditions are taken from try_redirect_by_replacing_jump.  */
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return false;

  if (!onlyjump_p (insn)
      || tablejump_p (insn, NULL, NULL))
    return false;

  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  return true;
}
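
/* Illustration (not part of the original sources): the index trick
   EDGE_SUCC (src, EDGE_SUCC (src, 0) == e) picks the *other*
   successor edge — the comparison yields 1 when E is successor 0 and
   0 otherwise — so TARGET is always the destination the branch would
   be redirected to.  */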

static basic_block
rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
{
  bb = cfg_layout_duplicate_bb (bb, id);
  bb->aux = NULL;
  return bb;
}

/* Do bookkeeping of basic block BB for the profile consistency checker.
   Store the counts in RECORD.  */
static void
rtl_account_profile_record (basic_block bb, struct profile_record *record)
{
  rtx_insn *insn;
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	record->size += insn_cost (insn, false);
	if (profile_info)
	  {
	    if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
		&& ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
		&& bb->count.ipa ().initialized_p ())
	      record->time
		+= insn_cost (insn, true) * bb->count.ipa ().to_gcov_type ();
	  }
	else if (bb->count.initialized_p ()
		 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
	  record->time
	    += insn_cost (insn, true)
	       * bb->count.to_sreal_scale
		      (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
	else
	  record->time += insn_cost (insn, true);
      }
}
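
/* Illustration (not part of the original sources): with guessed
   profiles the accumulated time is roughly

     time += insn_cost (insn, true) * (bb->count / entry_count)

   i.e. each insn's speed cost weighted by how often BB executes per
   function invocation; with IPA (feedback) profiles the absolute
   execution count is used instead.  */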

/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  rtl_duplicate_bb,
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};

/* Implementation of CFG manipulation for cfglayout-mode RTL, where
   basic blocks connected by fallthru edges do not have to be adjacent.
   This representation will hopefully become the default one in a
   future version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  rtl_dump_bb_for_graph,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  rtl_can_remove_branch_p,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_body_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi */
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,
};

#include "gt-cfgrtl.h"

#if __GNUC__ >= 10
#  pragma GCC diagnostic pop
#endif