/* Control flow functions for trees.
   Copyright (C) 2001-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;
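
/* For example (schematic), given

     switch (x) { case 1: case 3: goto L; }

   the single outgoing edge to L's block would map to the chain
   CASE_1 -> CASE_3, linked through the CASE_CHAIN fields.  */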

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
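
/* For illustration (schematic GIMPLE, invented SSA names), a loop
   guarded by "#pragma GCC ivdep" typically reaches this point as

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind, 0);
     if (_2 != 0) goto <body>; else goto <exit>;

   for which the switch above sets loop->safelen = INT_MAX, and the
   call is then degraded to the plain copy "_2 = _1".  */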

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  class loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);

      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable; we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
      return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for the C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize the GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
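
/* For example, a noreturn call such as "abort ()" is marked as
   control-altering here, as is a call that might perform an abnormal
   goto (e.g. in a function containing nonlocal labels); make_blocks_1
   below then ends the basic block at such calls.  */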


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
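	  /* Schematically (hypothetical source), for a returns-twice
	     call like

	       x = setjmp (env);

	     this yields

	       tmp = setjmp (env);
	       x = tmp;

	     so X keeps its old value along the abnormal edge.  */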
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
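
  /* Schematically, with debug markers written as "# DEBUG BEGIN_STMT",

	 # DEBUG BEGIN_STMT
       L1:
	 x = 1;

     becomes

       L1:
	 # DEBUG BEGIN_STMT
	 x = 1;

     so the label still starts its basic block and the marker stays
     with the statement it annotates.  */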
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block containing
   an IFN_ABNORMAL_DISPATCHER internal call, return the dispatcher's
   basic block; otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if
   COMPUTED_GOTO is false, otherwise factor the computed gotos.  */
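
/* Schematically, for two setjmp-like calls and one nonlocal label,
   the (non-computed-goto) dispatcher built below is a block holding
   just

     .ABNORMAL_DISPATCHER (0);

   with abnormal edges from each call's block to the dispatcher and
   from the dispatcher to the label's block (block names hypothetical).  */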

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, 2 when it ends with a statement that might return
   to this function via a nonlocal goto, and 0 otherwise.  Updates
   *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
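
  /* Schematically, factoring rewrites each computed "goto *x_N;" as

	 gotovar = x_N;
	 goto <factored_bb>;

     where the single block

       <factored_bb>:
	 goto *gotovar;

     has one ordinary incoming edge per original goto and one abnormal
     outgoing edge per potential (FORCED_LABEL) target.  */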
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs
   as needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
        bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
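
/* For example, if "if (p) x = 1; else x = 2;" occupies one source line,
   assign_discriminators below can give the blocks holding the two
   assignments distinct discriminators for that line, so sample-based
   profiles can attribute counts to each arm separately (schematic;
   the exact values depend on block layout).  */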

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
          && to.file != NULL
          && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label, label_record *label_for_bb)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad, label_for_bb);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab, label_for_bb);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab, label_for_bb);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
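
/* For instance (schematic GIMPLE), if a block starts with

     <D.1234>:
     <D.1235>:
     user_lab:

   and only <D.1235> is referenced by a goto, step 1 selects user_lab
   as the leading label, step 2 redirects the goto to user_lab, and
   step 3 deletes the two artificial labels.  */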
1527 
1528 void
cleanup_dead_labels(void)1529 cleanup_dead_labels (void)
1530 {
1531   basic_block bb;
1532   label_record *label_for_bb = XCNEWVEC (struct label_record,
1533 					 last_basic_block_for_fn (cfun));
1534 
1535   /* Find a suitable label for each block.  We use the first user-defined
1536      label if there is one, or otherwise just the first label we see.  */
1537   FOR_EACH_BB_FN (bb, cfun)
1538     {
1539       gimple_stmt_iterator i;
1540 
1541       for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1542 	{
1543 	  tree label;
1544 	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1545 
1546 	  if (!label_stmt)
1547 	    break;
1548 
1549 	  label = gimple_label_label (label_stmt);
1550 
1551 	  /* If we have not yet seen a label for the current block,
1552 	     remember this one and see if there are more labels.  */
1553 	  if (!label_for_bb[bb->index].label)
1554 	    {
1555 	      label_for_bb[bb->index].label = label;
1556 	      continue;
1557 	    }
1558 
1559 	  /* If we did see a label for the current block already, but it
1560 	     is an artificially created label, replace it if the current
1561 	     label is a user defined label.  */
1562 	  if (!DECL_ARTIFICIAL (label)
1563 	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1564 	    {
1565 	      label_for_bb[bb->index].label = label;
1566 	      break;
1567 	    }
1568 	}
1569     }
1570 
1571   /* Now redirect all jumps/branches to the selected label.
1572      First do so for each block ending in a control statement.  */
1573   FOR_EACH_BB_FN (bb, cfun)
1574     {
1575       gimple *stmt = last_stmt (bb);
1576       tree label, new_label;
1577 
1578       if (!stmt)
1579 	continue;
1580 
1581       switch (gimple_code (stmt))
1582 	{
1583 	case GIMPLE_COND:
1584 	  {
1585 	    gcond *cond_stmt = as_a <gcond *> (stmt);
1586 	    label = gimple_cond_true_label (cond_stmt);
1587 	    if (label)
1588 	      {
1589 		new_label = main_block_label (label, label_for_bb);
1590 		if (new_label != label)
1591 		  gimple_cond_set_true_label (cond_stmt, new_label);
1592 	      }
1593 
1594 	    label = gimple_cond_false_label (cond_stmt);
1595 	    if (label)
1596 	      {
1597 		new_label = main_block_label (label, label_for_bb);
1598 		if (new_label != label)
1599 		  gimple_cond_set_false_label (cond_stmt, new_label);
1600 	      }
1601 	  }
1602 	  break;
1603 
1604 	case GIMPLE_SWITCH:
1605 	  {
1606 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
1607 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
1608 
1609 	    /* Replace all destination labels.  */
1610 	    for (i = 0; i < n; ++i)
1611 	      {
1612 		tree case_label = gimple_switch_label (switch_stmt, i);
1613 		label = CASE_LABEL (case_label);
1614 		new_label = main_block_label (label, label_for_bb);
1615 		if (new_label != label)
1616 		  CASE_LABEL (case_label) = new_label;
1617 	      }
1618 	    break;
1619 	  }
1620 
1621 	case GIMPLE_ASM:
1622 	  {
1623 	    gasm *asm_stmt = as_a <gasm *> (stmt);
1624 	    int i, n = gimple_asm_nlabels (asm_stmt);
1625 
1626 	    for (i = 0; i < n; ++i)
1627 	      {
1628 		tree cons = gimple_asm_label_op (asm_stmt, i);
1629 		tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1630 		TREE_VALUE (cons) = label;
1631 	      }
1632 	    break;
1633 	  }
1634 
1635 	/* We have to handle gotos until they're removed, and we don't
1636 	   remove them until after we've created the CFG edges.  */
1637 	case GIMPLE_GOTO:
1638 	  if (!computed_goto_p (stmt))
1639 	    {
1640 	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
1641 	      label = gimple_goto_dest (goto_stmt);
1642 	      new_label = main_block_label (label, label_for_bb);
1643 	      if (new_label != label)
1644 		gimple_goto_set_dest (goto_stmt, new_label);
1645 	    }
1646 	  break;
1647 
1648 	case GIMPLE_TRANSACTION:
1649 	  {
1650 	    gtransaction *txn = as_a <gtransaction *> (stmt);
1651 
1652 	    label = gimple_transaction_label_norm (txn);
1653 	    if (label)
1654 	      {
1655 		new_label = main_block_label (label, label_for_bb);
1656 		if (new_label != label)
1657 		  gimple_transaction_set_label_norm (txn, new_label);
1658 	      }
1659 
1660 	    label = gimple_transaction_label_uninst (txn);
1661 	    if (label)
1662 	      {
1663 		new_label = main_block_label (label, label_for_bb);
1664 		if (new_label != label)
1665 		  gimple_transaction_set_label_uninst (txn, new_label);
1666 	      }
1667 
1668 	    label = gimple_transaction_label_over (txn);
1669 	    if (label)
1670 	      {
1671 		new_label = main_block_label (label, label_for_bb);
1672 		if (new_label != label)
1673 		  gimple_transaction_set_label_over (txn, new_label);
1674 	      }
1675 	  }
1676 	  break;
1677 
1678 	default:
1679 	  break;
1680       }
1681     }
1682 
1683   /* Do the same for the exception region tree labels.  */
1684   cleanup_dead_labels_eh (label_for_bb);
1685 
1686   /* Finally, purge dead labels.  All user-defined labels, labels that
1687      can be the target of non-local gotos, and labels which have their
1688      address taken are preserved.  */
1689   FOR_EACH_BB_FN (bb, cfun)
1690     {
1691       gimple_stmt_iterator i;
1692       tree label_for_this_bb = label_for_bb[bb->index].label;
1693 
1694       if (!label_for_this_bb)
1695 	continue;
1696 
1697       /* If the main label of the block is unused, we may still remove it.  */
1698       if (!label_for_bb[bb->index].used)
1699 	label_for_this_bb = NULL;
1700 
1701       for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1702 	{
1703 	  tree label;
1704 	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1705 
1706 	  if (!label_stmt)
1707 	    break;
1708 
1709 	  label = gimple_label_label (label_stmt);
1710 
1711 	  if (label == label_for_this_bb
1712 	      || !DECL_ARTIFICIAL (label)
1713 	      || DECL_NONLOCAL (label)
1714 	      || FORCED_LABEL (label))
1715 	    gsi_next (&i);
1716 	  else
1717 	    gsi_remove (&i, true);
1718 	}
1719     }
1720 
1721   free (label_for_bb);
1722 }
1723 
1724 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1725    the ones jumping to the same label.
1726    E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
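
/* As an illustrative sketch (hypothetical input, not from a real
   testcase), a switch such as

	switch (x)
	  {
	  case 1: goto L;
	  case 2: goto L;
	  case 3: goto L;
	  default: goto D;
	  }

   has three consecutive cases reaching the same block, so its case
   vector is rewritten to the single entry "case 1 ... 3: goto L;".
   Cases whose destination is the default destination, or whose
   destination block has become unreachable, are dropped instead.  */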
1727 
1728 bool
1729 group_case_labels_stmt (gswitch *stmt)
1730 {
1731   int old_size = gimple_switch_num_labels (stmt);
1732   int i, next_index, new_size;
1733   basic_block default_bb = NULL;
1734   hash_set<tree> *removed_labels = NULL;
1735 
1736   default_bb = gimple_switch_default_bb (cfun, stmt);
1737 
1738   /* Look for possible opportunities to merge cases.  */
1739   new_size = i = 1;
1740   while (i < old_size)
1741     {
1742       tree base_case, base_high;
1743       basic_block base_bb;
1744 
1745       base_case = gimple_switch_label (stmt, i);
1746 
1747       gcc_assert (base_case);
1748       base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1749 
1750       /* Discard cases that have the same destination as the default case or
1751 	 whose destination blocks have already been removed as unreachable.  */
1752       if (base_bb == NULL
1753 	  || base_bb == default_bb
1754 	  || (removed_labels
1755 	      && removed_labels->contains (CASE_LABEL (base_case))))
1756 	{
1757 	  i++;
1758 	  continue;
1759 	}
1760 
1761       base_high = CASE_HIGH (base_case)
1762 	  ? CASE_HIGH (base_case)
1763 	  : CASE_LOW (base_case);
1764       next_index = i + 1;
1765 
1766       /* Try to merge case labels.  Break out when we reach the end
1767 	 of the label vector or when we cannot merge the next case
1768 	 label with the current one.  */
1769       while (next_index < old_size)
1770 	{
1771 	  tree merge_case = gimple_switch_label (stmt, next_index);
1772 	  basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1773 	  wide_int bhp1 = wi::to_wide (base_high) + 1;
1774 
1775 	  /* Merge the cases if they jump to the same place,
1776 	     and their ranges are consecutive.  */
1777 	  if (merge_bb == base_bb
1778 	      && (removed_labels == NULL
1779 		  || !removed_labels->contains (CASE_LABEL (merge_case)))
1780 	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1781 	    {
1782 	      base_high
1783 		= (CASE_HIGH (merge_case)
1784 		   ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1785 	      CASE_HIGH (base_case) = base_high;
1786 	      next_index++;
1787 	    }
1788 	  else
1789 	    break;
1790 	}
1791 
1792       /* Discard cases that have an unreachable destination block.  */
1793       if (EDGE_COUNT (base_bb->succs) == 0
1794 	  && gimple_seq_unreachable_p (bb_seq (base_bb))
1795 	  /* Don't optimize this if the __builtin_unreachable () call is
1796 	     the one implicitly added by the C++ FE, or we would do it too
1797 	     early, before -Wreturn-type can be diagnosed.  We'll optimize
1798 	     it later, during the switchconv pass or any other cfg cleanup.  */
1799 	  && (gimple_in_ssa_p (cfun)
1800 	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1801 		  != BUILTINS_LOCATION)))
1802 	{
1803 	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1804 	  if (base_edge != NULL)
1805 	    {
1806 	      for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1807 		   !gsi_end_p (gsi); gsi_next (&gsi))
1808 		if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1809 		  {
1810 		    if (FORCED_LABEL (gimple_label_label (stmt))
1811 			|| DECL_NONLOCAL (gimple_label_label (stmt)))
1812 		      {
1813 			/* Forced/non-local labels aren't going to be removed,
1814 			   but they will be moved to some neighbouring basic
1815 			   block. If some later case label refers to one of
1816 			   those labels, we should throw that case away rather
1817 			   than keeping it around and referring to some random
1818 			   other basic block without an edge to it.  */
1819 			if (removed_labels == NULL)
1820 			  removed_labels = new hash_set<tree>;
1821 			removed_labels->add (gimple_label_label (stmt));
1822 		      }
1823 		  }
1824 		else
1825 		  break;
1826 	      remove_edge_and_dominated_blocks (base_edge);
1827 	    }
1828 	  i = next_index;
1829 	  continue;
1830 	}
1831 
1832       if (new_size < i)
1833 	gimple_switch_set_label (stmt, new_size,
1834 				 gimple_switch_label (stmt, i));
1835       i = next_index;
1836       new_size++;
1837     }
1838 
1839   gcc_assert (new_size <= old_size);
1840 
1841   if (new_size < old_size)
1842     gimple_switch_set_num_labels (stmt, new_size);
1843 
1844   delete removed_labels;
1845   return new_size < old_size;
1846 }
1847 
1848 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1849    and scan the sorted vector of cases.  Combine the ones jumping to the
1850    same label.  */
1851 
1852 bool
1853 group_case_labels (void)
1854 {
1855   basic_block bb;
1856   bool changed = false;
1857 
1858   FOR_EACH_BB_FN (bb, cfun)
1859     {
1860       gimple *stmt = last_stmt (bb);
1861       if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1862 	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1863     }
1864 
1865   return changed;
1866 }
1867 
1868 /* Checks whether we can merge block B into block A.  */
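
/* Informally, a merge requires the shape

	A --(its only, non-complex, outgoing edge)--> B

   where B has no other predecessors, A does not end in a statement
   that must end a basic block, and B carries no labels or PHI nodes
   that must be preserved; the exact conditions are checked below.  */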
1869 
1870 static bool
1871 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1872 {
1873   gimple *stmt;
1874 
1875   if (!single_succ_p (a))
1876     return false;
1877 
1878   if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1879     return false;
1880 
1881   if (single_succ (a) != b)
1882     return false;
1883 
1884   if (!single_pred_p (b))
1885     return false;
1886 
1887   if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1888       || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1889     return false;
1890 
1891   /* If A ends by a statement causing exceptions or something similar, we
1892      cannot merge the blocks.  */
1893   stmt = last_stmt (a);
1894   if (stmt && stmt_ends_bb_p (stmt))
1895     return false;
1896 
1897   /* Do not allow a block with only a non-local label to be merged.  */
1898   if (stmt)
1899     if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1900       if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1901 	return false;
1902 
1903   /* Examine the labels at the beginning of B.  */
1904   for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1905        gsi_next (&gsi))
1906     {
1907       tree lab;
1908       glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1909       if (!label_stmt)
1910 	break;
1911       lab = gimple_label_label (label_stmt);
1912 
1913       /* Do not remove user-forced labels or, at -O0, any user labels.  */
1914       if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1915 	return false;
1916     }
1917 
1918   /* Protect simple loop latches.  We only want to avoid merging
1919      the latch with the loop header or with a block in another
1920      loop in this case.  */
1921   if (current_loops
1922       && b->loop_father->latch == b
1923       && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1924       && (b->loop_father->header == a
1925 	  || b->loop_father != a->loop_father))
1926     return false;
1927 
1928   /* It must be possible to eliminate all phi nodes in B.  If ssa form
1929      is not up-to-date and a name-mapping is registered, we cannot eliminate
1930      any phis.  Symbols marked for renaming are never a problem though.  */
1931   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1932        gsi_next (&gsi))
1933     {
1934       gphi *phi = gsi.phi ();
1935       /* Technically only new names matter.  */
1936       if (name_registered_for_update_p (PHI_RESULT (phi)))
1937 	return false;
1938     }
1939 
1940   /* When not optimizing, don't merge if we'd lose goto_locus.  */
1941   if (!optimize
1942       && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1943     {
1944       location_t goto_locus = single_succ_edge (a)->goto_locus;
1945       gimple_stmt_iterator prev, next;
1946       prev = gsi_last_nondebug_bb (a);
1947       next = gsi_after_labels (b);
1948       if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1949 	gsi_next_nondebug (&next);
1950       if ((gsi_end_p (prev)
1951 	   || gimple_location (gsi_stmt (prev)) != goto_locus)
1952 	  && (gsi_end_p (next)
1953 	      || gimple_location (gsi_stmt (next)) != goto_locus))
1954 	return false;
1955     }
1956 
1957   return true;
1958 }
1959 
1960 /* Replaces all uses of NAME by VAL.  */
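
/* For example (a hypothetical snippet), replacing x_1 by the constant
   4 rewrites

	y_2 = x_1 + 1;		->	y_2 = 4 + 1;
	if (x_1 > 3) ...	->	if (4 > 3) ...

   after which the fold_stmt call below may simplify the statements
   further.  */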
1961 
1962 void
1963 replace_uses_by (tree name, tree val)
1964 {
1965   imm_use_iterator imm_iter;
1966   use_operand_p use;
1967   gimple *stmt;
1968   edge e;
1969 
1970   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1971     {
1972       /* Mark the block if we change the last stmt in it.  */
1973       if (cfgcleanup_altered_bbs
1974 	  && stmt_ends_bb_p (stmt))
1975 	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1976 
1977       FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1978         {
1979 	  replace_exp (use, val);
1980 
1981 	  if (gimple_code (stmt) == GIMPLE_PHI)
1982 	    {
1983 	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1984 				       PHI_ARG_INDEX_FROM_USE (use));
1985 	      if (e->flags & EDGE_ABNORMAL
1986 		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1987 		{
1988 		  /* This can only occur for virtual operands, since
1989 		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1990 		     would prevent replacement.  */
1991 		  gcc_checking_assert (virtual_operand_p (name));
1992 		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1993 		}
1994 	    }
1995 	}
1996 
1997       if (gimple_code (stmt) != GIMPLE_PHI)
1998 	{
1999 	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2000 	  gimple *orig_stmt = stmt;
2001 	  size_t i;
2002 
2003 	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
2004 	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
2005 	     only change something from non-invariant to invariant, and
2006 	     only when propagating constants.  */
2007 	  if (is_gimple_min_invariant (val))
2008 	    for (i = 0; i < gimple_num_ops (stmt); i++)
2009 	      {
2010 		tree op = gimple_op (stmt, i);
2011 		/* Operands may be empty here.  For example, the labels
2012 		   of a GIMPLE_COND are nulled out following the creation
2013 		   of the corresponding CFG edges.  */
2014 		if (op && TREE_CODE (op) == ADDR_EXPR)
2015 		  recompute_tree_invariant_for_addr_expr (op);
2016 	      }
2017 
2018 	  if (fold_stmt (&gsi))
2019 	    stmt = gsi_stmt (gsi);
2020 
2021 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2022 	    gimple_purge_dead_eh_edges (gimple_bb (stmt));
2023 
2024 	  update_stmt (stmt);
2025 	}
2026     }
2027 
2028   gcc_checking_assert (has_zero_uses (name));
2029 
2030   /* Also update the trees stored in loop structures.  */
2031   if (current_loops)
2032     {
2033       class loop *loop;
2034 
2035       FOR_EACH_LOOP (loop, 0)
2036 	{
2037 	  substitute_in_loop_info (loop, name, val);
2038 	}
2039     }
2040 }
2041 
2042 /* Merge block B into block A.  */
2043 
2044 static void
2045 gimple_merge_blocks (basic_block a, basic_block b)
2046 {
2047   gimple_stmt_iterator last, gsi;
2048   gphi_iterator psi;
2049 
2050   if (dump_file)
2051     fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2052 
2053   /* Remove all single-valued PHI nodes from block B of the form
2054      V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
2055   gsi = gsi_last_bb (a);
2056   for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2057     {
2058       gimple *phi = gsi_stmt (psi);
2059       tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2060       gimple *copy;
2061       bool may_replace_uses = (virtual_operand_p (def)
2062 			       || may_propagate_copy (def, use));
2063 
2064       /* In case we maintain loop closed ssa form, do not propagate arguments
2065 	 of loop exit phi nodes.  */
2066       if (current_loops
2067 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2068 	  && !virtual_operand_p (def)
2069 	  && TREE_CODE (use) == SSA_NAME
2070 	  && a->loop_father != b->loop_father)
2071 	may_replace_uses = false;
2072 
2073       if (!may_replace_uses)
2074 	{
2075 	  gcc_assert (!virtual_operand_p (def));
2076 
2077 	  /* Note that just emitting the copies is fine -- there is no problem
2078 	     with ordering of phi nodes.  This is because A is the single
2079 	     predecessor of B, therefore results of the phi nodes cannot
2080 	     appear as arguments of the phi nodes.  */
2081 	  copy = gimple_build_assign (def, use);
2082 	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2083           remove_phi_node (&psi, false);
2084 	}
2085       else
2086         {
2087 	  /* If we deal with a PHI for virtual operands, we can simply
2088 	     propagate these without fussing with folding or updating
2089 	     the stmt.  */
2090 	  if (virtual_operand_p (def))
2091 	    {
2092 	      imm_use_iterator iter;
2093 	      use_operand_p use_p;
2094 	      gimple *stmt;
2095 
2096 	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2097 		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2098 		  SET_USE (use_p, use);
2099 
2100 	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2101 		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2102 	    }
2103 	  else
2104             replace_uses_by (def, use);
2105 
2106           remove_phi_node (&psi, true);
2107         }
2108     }
2109 
2110   /* Ensure that B follows A.  */
2111   move_block_after (b, a);
2112 
2113   gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2114   gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2115 
2116   /* Remove labels from B and set gimple_bb to A for other statements.  */
2117   for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2118     {
2119       gimple *stmt = gsi_stmt (gsi);
2120       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2121 	{
2122 	  tree label = gimple_label_label (label_stmt);
2123 	  int lp_nr;
2124 
2125 	  gsi_remove (&gsi, false);
2126 
2127 	  /* Now that we can thread computed gotos, we might have
2128 	     a situation where we have a forced label in block B.
2129 	     However, the label at the start of block B might still be
2130 	     used in other ways (think about the runtime checking for
2131 	     Fortran assigned gotos).  So we cannot just delete the
2132 	     label.  Instead we move the label to the start of block A.  */
2133 	  if (FORCED_LABEL (label))
2134 	    {
2135 	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2136 	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2137 	    }
2138 	  /* Other user labels are kept around in the form of a debug stmt.  */
2139 	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2140 	    {
2141 	      gimple *dbg = gimple_build_debug_bind (label,
2142 						     integer_zero_node,
2143 						     stmt);
2144 	      gimple_debug_bind_reset_value (dbg);
2145 	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2146 	    }
2147 
2148 	  lp_nr = EH_LANDING_PAD_NR (label);
2149 	  if (lp_nr)
2150 	    {
2151 	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2152 	      lp->post_landing_pad = NULL;
2153 	    }
2154 	}
2155       else
2156 	{
2157 	  gimple_set_bb (stmt, a);
2158 	  gsi_next (&gsi);
2159 	}
2160     }
2161 
2162   /* When merging two BBs, if their counts are different, the larger count
2163      is selected as the new bb count. This is to handle inconsistent
2164      profiles.  */
2165   if (a->loop_father == b->loop_father)
2166     {
2167       a->count = a->count.merge (b->count);
2168     }
2169 
2170   /* Merge the sequences.  */
2171   last = gsi_last_bb (a);
2172   gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2173   set_bb_seq (b, NULL);
2174 
2175   if (cfgcleanup_altered_bbs)
2176     bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2177 }
2178 
2179 
2180 /* Return the one of BB's two successors that is not reached by a
2181    complex edge, if there is one; otherwise return BB.  We use
2182    this in optimizations that use post-dominators for their heuristics,
2183    to catch the cases in C++ where function calls are involved.  */
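
/* E.g. when BB ends in a call that may throw internally,

	BB --(EDGE_FALLTHRU)--> BB2
	 \--(EDGE_EH)---------> landing pad

   the EH edge is complex, so BB2 is returned.  */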
2184 
2185 basic_block
2186 single_noncomplex_succ (basic_block bb)
2187 {
2188   edge e0, e1;
2189   if (EDGE_COUNT (bb->succs) != 2)
2190     return bb;
2191 
2192   e0 = EDGE_SUCC (bb, 0);
2193   e1 = EDGE_SUCC (bb, 1);
2194   if (e0->flags & EDGE_COMPLEX)
2195     return e1->dest;
2196   if (e1->flags & EDGE_COMPLEX)
2197     return e0->dest;
2198 
2199   return bb;
2200 }
2201 
2202 /* CALL is a GIMPLE call statement.  Set current_function_calls_* flags.  */
2203 
2204 void
2205 notice_special_calls (gcall *call)
2206 {
2207   int flags = gimple_call_flags (call);
2208 
2209   if (flags & ECF_MAY_BE_ALLOCA)
2210     cfun->calls_alloca = true;
2211   if (flags & ECF_RETURNS_TWICE)
2212     cfun->calls_setjmp = true;
2213 }
2214 
2215 
2216 /* Clear flags set by notice_special_calls.  Used by dead code removal
2217    to update the flags.  */
2218 
2219 void
2220 clear_special_calls (void)
2221 {
2222   cfun->calls_alloca = false;
2223   cfun->calls_setjmp = false;
2224 }
2225 
2226 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
2227 
2228 static void
2229 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2230 {
2231   /* Since this block is no longer reachable, we can just delete all
2232      of its PHI nodes.  */
2233   remove_phi_nodes (bb);
2234 
2235   /* Remove edges to BB's successors.  */
2236   while (EDGE_COUNT (bb->succs) > 0)
2237     remove_edge (EDGE_SUCC (bb, 0));
2238 }
2239 
2240 
2241 /* Remove statements of basic block BB.  */
2242 
2243 static void
2244 remove_bb (basic_block bb)
2245 {
2246   gimple_stmt_iterator i;
2247 
2248   if (dump_file)
2249     {
2250       fprintf (dump_file, "Removing basic block %d\n", bb->index);
2251       if (dump_flags & TDF_DETAILS)
2252 	{
2253 	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2254 	  fprintf (dump_file, "\n");
2255 	}
2256     }
2257 
2258   if (current_loops)
2259     {
2260       class loop *loop = bb->loop_father;
2261 
2262       /* If a loop gets removed, clean up the information associated
2263 	 with it.  */
2264       if (loop->latch == bb
2265 	  || loop->header == bb)
2266 	free_numbers_of_iterations_estimates (loop);
2267     }
2268 
2269   /* Remove all the instructions in the block.  */
2270   if (bb_seq (bb) != NULL)
2271     {
2272       /* Walk backwards so as to get a chance to substitute all
2273 	 released DEFs into debug stmts.  See
2274 	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2275 	 details.  */
2276       for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2277 	{
2278 	  gimple *stmt = gsi_stmt (i);
2279 	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
2280 	  if (label_stmt
2281 	      && (FORCED_LABEL (gimple_label_label (label_stmt))
2282 		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2283 	    {
2284 	      basic_block new_bb;
2285 	      gimple_stmt_iterator new_gsi;
2286 
2287 	      /* A non-reachable non-local label may still be referenced.
2288 		 But it no longer needs to carry the extra semantics of
2289 		 non-locality.  */
2290 	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2291 		{
2292 		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2293 		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2294 		}
2295 
2296 	      new_bb = bb->prev_bb;
2297 	      /* Don't move any labels into ENTRY block.  */
2298 	      if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2299 		{
2300 		  new_bb = single_succ (new_bb);
2301 		  gcc_assert (new_bb != bb);
2302 		}
2303 	      new_gsi = gsi_after_labels (new_bb);
2304 	      gsi_remove (&i, false);
2305 	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2306 	    }
2307 	  else
2308 	    {
2309 	      /* Release SSA definitions.  */
2310 	      release_defs (stmt);
2311 	      gsi_remove (&i, true);
2312 	    }
2313 
2314 	  if (gsi_end_p (i))
2315 	    i = gsi_last_bb (bb);
2316 	  else
2317 	    gsi_prev (&i);
2318 	}
2319     }
2320 
2321   remove_phi_nodes_and_edges_for_unreachable_block (bb);
2322   bb->il.gimple.seq = NULL;
2323   bb->il.gimple.phi_nodes = NULL;
2324 }
2325 
2326 
2327 /* Given a basic block BB and a value VAL for use in the final statement
2328    of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2329    the edge that will be taken out of the block.
2330    If VAL is NULL_TREE, then the current value of the final statement's
2331    predicate or index is used.
2332    If the value does not match a unique edge, NULL is returned.  */
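
/* For instance, if BB ends in

	if (x_1 > 4) goto <then>; else goto <else>;

   then find_taken_edge (bb, integer_zero_node) returns the <else>
   edge, while a non-constant VAL leaves both of the conditional's
   edges possible and yields NULL.  */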
2333 
2334 edge
2335 find_taken_edge (basic_block bb, tree val)
2336 {
2337   gimple *stmt;
2338 
2339   stmt = last_stmt (bb);
2340 
2341   /* Handle ENTRY and EXIT.  */
2342   if (!stmt)
2343     return NULL;
2344 
2345   if (gimple_code (stmt) == GIMPLE_COND)
2346     return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2347 
2348   if (gimple_code (stmt) == GIMPLE_SWITCH)
2349     return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2350 
2351   if (computed_goto_p (stmt))
2352     {
2353       /* Only optimize if the argument is a label; if the argument is
2354 	 not a label then we cannot construct a proper CFG.
2355 
2356          It may be the case that we only need to allow the LABEL_REF to
2357          appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2358          appear inside a LABEL_EXPR just to be safe.  */
2359       if (val
2360 	  && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2361 	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2362 	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2363     }
2364 
2365   /* Otherwise we only know the taken successor edge if it's unique.  */
2366   return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2367 }
2368 
2369 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2370    statement, determine which of the outgoing edges will be taken out of the
2371    block.  Return NULL if either edge may be taken.  */
2372 
2373 static edge
2374 find_taken_edge_computed_goto (basic_block bb, tree val)
2375 {
2376   basic_block dest;
2377   edge e = NULL;
2378 
2379   dest = label_to_block (cfun, val);
2380   if (dest)
2381     e = find_edge (bb, dest);
2382 
2383   /* It's possible for find_edge to return NULL here on invalid code
2384      that abuses the labels-as-values extension (e.g. code that attempts to
2385      jump *between* functions via stored labels-as-values; PR 84136).
2386      If so, then we simply return that NULL for the edge.
2387      We don't currently have a way of detecting such invalid code, so we
2388      can't assert that it was the case when a NULL edge occurs here.  */
2389 
2390   return e;
2391 }
2392 
2393 /* Given COND_STMT and a constant value VAL for use as the predicate,
2394    determine which of the two edges will be taken out of
2395    the statement's block.  Return NULL if either edge may be taken.
2396    If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2397    is used.  */
2398 
2399 static edge
2400 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2401 {
2402   edge true_edge, false_edge;
2403 
2404   if (val == NULL_TREE)
2405     {
2406       /* Use the current value of the predicate.  */
2407       if (gimple_cond_true_p (cond_stmt))
2408 	val = integer_one_node;
2409       else if (gimple_cond_false_p (cond_stmt))
2410 	val = integer_zero_node;
2411       else
2412 	return NULL;
2413     }
2414   else if (TREE_CODE (val) != INTEGER_CST)
2415     return NULL;
2416 
2417   extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2418 				       &true_edge, &false_edge);
2419 
2420   return (integer_zerop (val) ? false_edge : true_edge);
2421 }
2422 
2423 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2424    which edge will be taken out of the statement's block.  Return NULL if any
2425    edge may be taken.
2426    If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2427    is used.  */
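
/* E.g. for a block ending in

	switch (x_1) { case 0 ... 9: ...; default: ...; }

   a VAL of 5 selects the edge to the "case 0 ... 9" destination and a
   VAL of 42 selects the default edge, whereas a non-constant VAL
   yields NULL.  */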
2428 
2429 edge
2430 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2431 {
2432   basic_block dest_bb;
2433   edge e;
2434   tree taken_case;
2435 
2436   if (gimple_switch_num_labels (switch_stmt) == 1)
2437     taken_case = gimple_switch_default_label (switch_stmt);
2438   else
2439     {
2440       if (val == NULL_TREE)
2441 	val = gimple_switch_index (switch_stmt);
2442       if (TREE_CODE (val) != INTEGER_CST)
2443 	return NULL;
2444       else
2445 	taken_case = find_case_label_for_value (switch_stmt, val);
2446     }
2447   dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2448 
2449   e = find_edge (gimple_bb (switch_stmt), dest_bb);
2450   gcc_assert (e);
2451   return e;
2452 }
2453 
2454 
2455 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2456    We can make optimal use here of the fact that the case labels are
2457    sorted: We can do a binary search for a case matching VAL.  */
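
/* A worked sketch on a hypothetical case vector: with

	[0] default:   [1] case 2:   [2] case 5 ... 7:   [3] case 9:

   and VAL == 6, the first probe is i == 2; CASE_LOW (5) compares
   below VAL and CASE_HIGH (7) does not, so the range label at index 2
   is returned.  With VAL == 4, no label matches and the loop falls
   back to returning the default case.  */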
2458 
2459 tree
2460 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2461 {
2462   size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2463   tree default_case = gimple_switch_default_label (switch_stmt);
2464 
2465   for (low = 0, high = n; high - low > 1; )
2466     {
2467       size_t i = (high + low) / 2;
2468       tree t = gimple_switch_label (switch_stmt, i);
2469       int cmp;
2470 
2471       /* Cache the result of comparing CASE_LOW and val.  */
2472       cmp = tree_int_cst_compare (CASE_LOW (t), val);
2473 
2474       if (cmp > 0)
2475 	high = i;
2476       else
2477 	low = i;
2478 
2479       if (CASE_HIGH (t) == NULL)
2480 	{
2481 	  /* A single-valued case label.  */
2482 	  if (cmp == 0)
2483 	    return t;
2484 	}
2485       else
2486 	{
2487 	  /* A case range.  We can only handle integer ranges.  */
2488 	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2489 	    return t;
2490 	}
2491     }
2492 
2493   return default_case;
2494 }
2495 
2496 
2497 /* Dump a basic block on stderr.  */
2498 
2499 void
2500 gimple_debug_bb (basic_block bb)
2501 {
2502   dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2503 }
2504 
2505 
2506 /* Dump basic block with index N on stderr.  */
2507 
2508 basic_block
2509 gimple_debug_bb_n (int n)
2510 {
2511   gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2512   return BASIC_BLOCK_FOR_FN (cfun, n);
2513 }
2514 
2515 
2516 /* Dump the CFG on stderr.
2517 
2518    FLAGS are the same used by the tree dumping functions
2519    (see TDF_* in dumpfile.h).  */
2520 
2521 void
2522 gimple_debug_cfg (dump_flags_t flags)
2523 {
2524   gimple_dump_cfg (stderr, flags);
2525 }
2526 
2527 
2528 /* Dump the program showing basic block boundaries on the given FILE.
2529 
2530    FLAGS are the same used by the tree dumping functions (see TDF_* in
2531    dumpfile.h).  */
2532 
2533 void
2534 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2535 {
2536   if (flags & TDF_DETAILS)
2537     {
2538       dump_function_header (file, current_function_decl, flags);
2539       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2540 	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2541 	       last_basic_block_for_fn (cfun));
2542 
2543       brief_dump_cfg (file, flags);
2544       fprintf (file, "\n");
2545     }
2546 
2547   if (flags & TDF_STATS)
2548     dump_cfg_stats (file);
2549 
2550   dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2551 }
2552 
2553 
2554 /* Dump CFG statistics on FILE.  */
2555 
2556 void
2557 dump_cfg_stats (FILE *file)
2558 {
2559   static long max_num_merged_labels = 0;
2560   unsigned long size, total = 0;
2561   long num_edges;
2562   basic_block bb;
2563   const char * const fmt_str   = "%-30s%-13s%12s\n";
2564   const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2565   const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2566   const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2567   const char *funcname = current_function_name ();
2568 
2569   fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2570 
2571   fprintf (file, "---------------------------------------------------------\n");
2572   fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2573   fprintf (file, fmt_str, "", "  instances  ", "used ");
2574   fprintf (file, "---------------------------------------------------------\n");
2575 
2576   size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2577   total += size;
2578   fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2579 	   SIZE_AMOUNT (size));
2580 
2581   num_edges = 0;
2582   FOR_EACH_BB_FN (bb, cfun)
2583     num_edges += EDGE_COUNT (bb->succs);
2584   size = num_edges * sizeof (class edge_def);
2585   total += size;
2586   fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2587 
2588   fprintf (file, "---------------------------------------------------------\n");
2589   fprintf (file, fmt_str_3, "Total memory used by CFG data",
2590 	   SIZE_AMOUNT (total));
2591   fprintf (file, "---------------------------------------------------------\n");
2592   fprintf (file, "\n");
2593 
2594   if (cfg_stats.num_merged_labels > max_num_merged_labels)
2595     max_num_merged_labels = cfg_stats.num_merged_labels;
2596 
2597   fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2598 	   cfg_stats.num_merged_labels, max_num_merged_labels);
2599 
2600   fprintf (file, "\n");
2601 }
2602 
2603 
2604 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2605    linked in the final executable.  */
2606 
2607 DEBUG_FUNCTION void
2608 debug_cfg_stats (void)
2609 {
2610   dump_cfg_stats (stderr);
2611 }
2612 
2613 /*---------------------------------------------------------------------------
2614 			     Miscellaneous helpers
2615 ---------------------------------------------------------------------------*/
2616 
2617 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2618    flow.  Transfers of control flow associated with EH are excluded.  */
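
/* For instance, in a function that calls setjmp, an ordinary call

	foo ();

   to a non-leaf function with side effects may return to the setjmp
   receiver via longjmp, so it can transfer control abnormally; a call
   to a const or ECF_LEAF function cannot.  */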
2619 
2620 static bool
2621 call_can_make_abnormal_goto (gimple *t)
2622 {
2623   /* If the function has no non-local labels, then a call cannot make an
2624      abnormal transfer of control.  */
2625   if (!cfun->has_nonlocal_label
2626       && !cfun->calls_setjmp)
2627     return false;
2628 
2629   /* Likewise if the call has no side effects.  */
2630   if (!gimple_has_side_effects (t))
2631     return false;
2632 
2633   /* Likewise if the called function is leaf.  */
2634   if (gimple_call_flags (t) & ECF_LEAF)
2635     return false;
2636 
2637   return true;
2638 }
2639 
2640 
2641 /* Return true if T can make an abnormal transfer of control flow.
2642    Transfers of control flow associated with EH are excluded.  */
2643 
2644 bool
2645 stmt_can_make_abnormal_goto (gimple *t)
2646 {
2647   if (computed_goto_p (t))
2648     return true;
2649   if (is_gimple_call (t))
2650     return call_can_make_abnormal_goto (t);
2651   return false;
2652 }
2653 
2654 
2655 /* Return true if T represents a stmt that always transfers control.  */
2656 
2657 bool
2658 is_ctrl_stmt (gimple *t)
2659 {
2660   switch (gimple_code (t))
2661     {
2662     case GIMPLE_COND:
2663     case GIMPLE_SWITCH:
2664     case GIMPLE_GOTO:
2665     case GIMPLE_RETURN:
2666     case GIMPLE_RESX:
2667       return true;
2668     default:
2669       return false;
2670     }
2671 }
2672 
2673 
2674 /* Return true if T is a statement that may alter the flow of control
2675    (e.g., a call to a non-returning function).  */
2676 
2677 bool
2678 is_ctrl_altering_stmt (gimple *t)
2679 {
2680   gcc_assert (t);
2681 
2682   switch (gimple_code (t))
2683     {
2684     case GIMPLE_CALL:
2685       /* The per-stmt call flag indicates whether the call could alter
2686 	 control flow.  */
2687       if (gimple_call_ctrl_altering_p (t))
2688 	return true;
2689       break;
2690 
2691     case GIMPLE_EH_DISPATCH:
2692       /* EH_DISPATCH branches to the individual catch handlers at
2693 	 this level of a try or allowed-exceptions region.  It can
2694 	 fallthru to the next statement as well.  */
2695       return true;
2696 
2697     case GIMPLE_ASM:
2698       if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2699 	return true;
2700       break;
2701 
2702     CASE_GIMPLE_OMP:
2703       /* OpenMP directives alter control flow.  */
2704       return true;
2705 
2706     case GIMPLE_TRANSACTION:
2707       /* A transaction start alters control flow.  */
2708       return true;
2709 
2710     default:
2711       break;
2712     }
2713 
2714   /* If a statement can throw, it alters control flow.  */
2715   return stmt_can_throw_internal (cfun, t);
2716 }
2717 
2718 
2719 /* Return true if T is a simple local goto.  */
2720 
2721 bool
2722 simple_goto_p (gimple *t)
2723 {
2724   return (gimple_code (t) == GIMPLE_GOTO
2725 	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2726 }
2727 
2728 
2729 /* Return true if STMT should start a new basic block.  PREV_STMT is
2730    the statement preceding STMT.  It is used when STMT is a label or a
2731    case label.  Labels should only start a new basic block if their
2732    previous statement wasn't a label.  Otherwise, a sequence of labels
2733    would generate unnecessary basic blocks that only contain a single
2734    label.  */
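
/* E.g. in the sequence

	<artificial label>:
	L2:
	  x_1 = ...;

   L2 does not start a new basic block (it is counted in
   cfg_stats.num_merged_labels) because the preceding label is
   compiler-generated and not nonlocal, and L2 itself is neither a
   nonlocal nor a forced label.  */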
2735 
2736 static inline bool
2737 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2738 {
2739   if (stmt == NULL)
2740     return false;
2741 
2742   /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2743      any nondebug stmts in the block.  We don't want to start another
2744      block in this case: the debug stmt will already have started the
2745      one STMT would start if we weren't outputting debug stmts.  */
2746   if (prev_stmt && is_gimple_debug (prev_stmt))
2747     return false;
2748 
2749   /* Labels start a new basic block only if the preceding statement
2750      wasn't a label of the same type.  This prevents the creation of
2751      consecutive blocks that have nothing but a single label.  */
2752   if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2753     {
2754       /* Nonlocal and computed GOTO targets always start a new block.  */
2755       if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2756 	  || FORCED_LABEL (gimple_label_label (label_stmt)))
2757 	return true;
2758 
2759       if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2760 	{
2761 	  if (DECL_NONLOCAL (gimple_label_label (plabel))
2762 	      || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2763 	    return true;
2764 
2765 	  cfg_stats.num_merged_labels++;
2766 	  return false;
2767 	}
2768       else
2769 	return true;
2770     }
2771   else if (gimple_code (stmt) == GIMPLE_CALL)
2772     {
2773       if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2774 	/* setjmp acts similar to a nonlocal GOTO target and thus should
2775 	   start a new block.  */
2776 	return true;
2777       if (gimple_call_internal_p (stmt, IFN_PHI)
2778 	  && prev_stmt
2779 	  && gimple_code (prev_stmt) != GIMPLE_LABEL
2780 	  && (gimple_code (prev_stmt) != GIMPLE_CALL
2781 	      || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2782 	/* PHI nodes start a new block unless preceded by a label
2783 	   or another PHI.  */
2784 	return true;
2785     }
2786 
2787   return false;
2788 }
2789 
2790 
2791 /* Return true if T should end a basic block.  */
2792 
2793 bool
2794 stmt_ends_bb_p (gimple *t)
2795 {
2796   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2797 }
2798 
2799 /* Remove block annotations and other data structures.  */
2800 
2801 void
2802 delete_tree_cfg_annotations (struct function *fn)
2803 {
2804   vec_free (label_to_block_map_for_fn (fn));
2805 }
2806 
2807 /* Return the virtual phi in BB, or NULL if there is none.  */
2808 
2809 gphi *
2810 get_virtual_phi (basic_block bb)
2811 {
2812   for (gphi_iterator gsi = gsi_start_phis (bb);
2813        !gsi_end_p (gsi);
2814        gsi_next (&gsi))
2815     {
2816       gphi *phi = gsi.phi ();
2817 
2818       if (virtual_operand_p (PHI_RESULT (phi)))
2819 	return phi;
2820     }
2821 
2822   return NULL;
2823 }
2824 
2825 /* Return the first non-debug statement in basic block BB, or NULL.  */
2826 
2827 gimple *
2828 first_stmt (basic_block bb)
2829 {
2830   gimple_stmt_iterator i = gsi_start_bb (bb);
2831   gimple *stmt = NULL;
2832 
2833   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2834     {
2835       gsi_next (&i);
2836       stmt = NULL;
2837     }
2838   return stmt;
2839 }
2840 
2841 /* Return the first non-label statement in basic block BB.  */
2842 
2843 static gimple *
2844 first_non_label_stmt (basic_block bb)
2845 {
2846   gimple_stmt_iterator i = gsi_start_bb (bb);
2847   while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2848     gsi_next (&i);
2849   return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2850 }
2851 
2852 /* Return the last non-debug statement in basic block BB, or NULL.  */
2853 
2854 gimple *
2855 last_stmt (basic_block bb)
2856 {
2857   gimple_stmt_iterator i = gsi_last_bb (bb);
2858   gimple *stmt = NULL;
2859 
2860   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2861     {
2862       gsi_prev (&i);
2863       stmt = NULL;
2864     }
2865   return stmt;
2866 }
2867 
2868 /* Return the last statement of an otherwise empty block.  Return NULL
2869    if the block is totally empty, or if it contains more than one
2870    statement.  */
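
/* E.g. for a block containing only

	L1:
	  return x_1;

   this returns the GIMPLE_RETURN; for a completely empty block, or one
   with two executable statements, it returns NULL.  Debug statements
   are ignored throughout.  */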
2871 
2872 gimple *
2873 last_and_only_stmt (basic_block bb)
2874 {
2875   gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2876   gimple *last, *prev;
2877 
2878   if (gsi_end_p (i))
2879     return NULL;
2880 
2881   last = gsi_stmt (i);
2882   gsi_prev_nondebug (&i);
2883   if (gsi_end_p (i))
2884     return last;
2885 
2886   /* Empty statements should no longer appear in the instruction stream.
2887      Everything that might have appeared before should be deleted by
2888      remove_useless_stmts, and the optimizers should just gsi_remove
2889      instead of smashing with build_empty_stmt.
2890 
2891      Thus the only thing that should appear here in a block containing
2892      one executable statement is a label.  */
2893   prev = gsi_stmt (i);
2894   if (gimple_code (prev) == GIMPLE_LABEL)
2895     return last;
2896   else
2897     return NULL;
2898 }
2899 
2900 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
2901 
2902 static void
2903 reinstall_phi_args (edge new_edge, edge old_edge)
2904 {
2905   edge_var_map *vm;
2906   int i;
2907   gphi_iterator phis;
2908 
2909   vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2910   if (!v)
2911     return;
2912 
2913   for (i = 0, phis = gsi_start_phis (new_edge->dest);
2914        v->iterate (i, &vm) && !gsi_end_p (phis);
2915        i++, gsi_next (&phis))
2916     {
2917       gphi *phi = phis.phi ();
2918       tree result = redirect_edge_var_map_result (vm);
2919       tree arg = redirect_edge_var_map_def (vm);
2920 
2921       gcc_assert (result == gimple_phi_result (phi));
2922 
2923       add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2924     }
2925 
2926   redirect_edge_var_map_clear (old_edge);
2927 }
2928 
2929 /* Returns the basic block after which the new basic block created
2930    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2931    near its "logical" location.  This is of most help to humans looking
2932    at debugging dumps.  */
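
/* E.g. when splitting edge E in

	BB1 --E--> BB3
	BB2 -----> BB3		(BB2 == BB3->prev_bb, simple edge)

   the new block is placed after BB1 rather than between BB2 and BB3,
   so the existing fallthru chain in the dump stays readable.  */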
2933 
2934 basic_block
2935 split_edge_bb_loc (edge edge_in)
2936 {
2937   basic_block dest = edge_in->dest;
2938   basic_block dest_prev = dest->prev_bb;
2939 
2940   if (dest_prev)
2941     {
2942       edge e = find_edge (dest_prev, dest);
2943       if (e && !(e->flags & EDGE_COMPLEX))
2944 	return edge_in->src;
2945     }
2946   return dest_prev;
2947 }
2948 
2949 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
2950    Abort on abnormal edges.  */
2951 
2952 static basic_block
2953 gimple_split_edge (edge edge_in)
2954 {
2955   basic_block new_bb, after_bb, dest;
2956   edge new_edge, e;
2957 
2958   /* Abnormal edges cannot be split.  */
2959   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2960 
2961   dest = edge_in->dest;
2962 
2963   after_bb = split_edge_bb_loc (edge_in);
2964 
2965   new_bb = create_empty_bb (after_bb);
2966   new_bb->count = edge_in->count ();
2967 
2968   e = redirect_edge_and_branch (edge_in, new_bb);
2969   gcc_assert (e == edge_in);
2970 
2971   new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2972   reinstall_phi_args (new_edge, e);
2973 
2974   return new_bb;
2975 }
2976 
2977 
2978 /* Verify properties of the address expression T whose base should be
2979    TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true.  */
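
/* E.g. for T == &a.b[i_1], the base is the VAR_DECL a, which must not
   be DECL_GIMPLE_REG_P and, when VERIFY_ADDRESSABLE, must have
   TREE_ADDRESSABLE set.  */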
2980 
2981 static bool
2982 verify_address (tree t, bool verify_addressable)
2983 {
2984   bool old_constant;
2985   bool old_side_effects;
2986   bool new_constant;
2987   bool new_side_effects;
2988 
2989   old_constant = TREE_CONSTANT (t);
2990   old_side_effects = TREE_SIDE_EFFECTS (t);
2991 
2992   recompute_tree_invariant_for_addr_expr (t);
2993   new_side_effects = TREE_SIDE_EFFECTS (t);
2994   new_constant = TREE_CONSTANT (t);
2995 
2996   if (old_constant != new_constant)
2997     {
2998       error ("constant not recomputed when %<ADDR_EXPR%> changed");
2999       return true;
3000     }
3001   if (old_side_effects != new_side_effects)
3002     {
3003       error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3004       return true;
3005     }
3006 
3007   tree base = TREE_OPERAND (t, 0);
3008   while (handled_component_p (base))
3009     base = TREE_OPERAND (base, 0);
3010 
3011   if (!(VAR_P (base)
3012 	|| TREE_CODE (base) == PARM_DECL
3013 	|| TREE_CODE (base) == RESULT_DECL))
3014     return false;
3015 
3016   if (DECL_GIMPLE_REG_P (base))
3017     {
3018       error ("%<DECL_GIMPLE_REG_P%> set on a variable with address taken");
3019       return true;
3020     }
3021 
3022   if (verify_addressable && !TREE_ADDRESSABLE (base))
3023     {
3024       error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3025       return true;
3026     }
3027 
3028   return false;
3029 }
3030 
3031 
3032 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3033    Returns true if there is an error, otherwise false.  */
3034 
3035 static bool
3036 verify_types_in_gimple_min_lval (tree expr)
3037 {
3038   tree op;
3039 
3040   if (is_gimple_id (expr))
3041     return false;
3042 
3043   if (TREE_CODE (expr) != TARGET_MEM_REF
3044       && TREE_CODE (expr) != MEM_REF)
3045     {
3046       error ("invalid expression for min lvalue");
3047       return true;
3048     }
3049 
3050   /* TARGET_MEM_REFs are strange beasts.  */
3051   if (TREE_CODE (expr) == TARGET_MEM_REF)
3052     return false;
3053 
3054   op = TREE_OPERAND (expr, 0);
3055   if (!is_gimple_val (op))
3056     {
3057       error ("invalid operand in indirect reference");
3058       debug_generic_stmt (op);
3059       return true;
3060     }
3061   /* Memory references now generally can involve a value conversion.  */
3062 
3063   return false;
3064 }
3065 
3066 /* Verify if EXPR is a valid GIMPLE reference expression.  If
3067    REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
3068    if there is an error, otherwise false.  */
3069 
3070 static bool
3071 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3072 {
3073   const char *code_name = get_tree_code_name (TREE_CODE (expr));
3074 
3075   if (TREE_CODE (expr) == REALPART_EXPR
3076       || TREE_CODE (expr) == IMAGPART_EXPR
3077       || TREE_CODE (expr) == BIT_FIELD_REF)
3078     {
3079       tree op = TREE_OPERAND (expr, 0);
3080       if (!is_gimple_reg_type (TREE_TYPE (expr)))
3081 	{
3082 	  error ("non-scalar %qs", code_name);
3083 	  return true;
3084 	}
3085 
3086       if (TREE_CODE (expr) == BIT_FIELD_REF)
3087 	{
3088 	  tree t1 = TREE_OPERAND (expr, 1);
3089 	  tree t2 = TREE_OPERAND (expr, 2);
3090 	  poly_uint64 size, bitpos;
3091 	  if (!poly_int_tree_p (t1, &size)
3092 	      || !poly_int_tree_p (t2, &bitpos)
3093 	      || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3094 	      || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3095 	    {
3096 	      error ("invalid position or size operand to %qs", code_name);
3097 	      return true;
3098 	    }
3099 	  if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3100 	      && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3101 	    {
3102 	      error ("integral result type precision does not match "
3103 		     "field size of %qs", code_name);
3104 	      return true;
3105 	    }
3106 	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3107 		   && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3108 		   && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3109 				size))
3110 	    {
3111 	      error ("mode size of non-integral result does not "
3112 		     "match field size of %qs",
3113 		     code_name);
3114 	      return true;
3115 	    }
3116 	  if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3117 	      && !type_has_mode_precision_p (TREE_TYPE (op)))
3118 	    {
3119 	      error ("%qs of non-mode-precision operand", code_name);
3120 	      return true;
3121 	    }
3122 	  if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3123 	      && maybe_gt (size + bitpos,
3124 			   tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3125 	    {
3126 	      error ("position plus size exceeds size of referenced object in "
3127 		     "%qs", code_name);
3128 	      return true;
3129 	    }
3130 	}
3131 
3132       if ((TREE_CODE (expr) == REALPART_EXPR
3133 	   || TREE_CODE (expr) == IMAGPART_EXPR)
3134 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3135 					 TREE_TYPE (TREE_TYPE (op))))
3136 	{
3137 	  error ("type mismatch in %qs reference", code_name);
3138 	  debug_generic_stmt (TREE_TYPE (expr));
3139 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3140 	  return true;
3141 	}
3142       expr = op;
3143     }
3144 
3145   while (handled_component_p (expr))
3146     {
3147       code_name = get_tree_code_name (TREE_CODE (expr));
3148 
3149       if (TREE_CODE (expr) == REALPART_EXPR
3150 	  || TREE_CODE (expr) == IMAGPART_EXPR
3151 	  || TREE_CODE (expr) == BIT_FIELD_REF)
3152 	{
3153 	  error ("non-top-level %qs", code_name);
3154 	  return true;
3155 	}
3156 
3157       tree op = TREE_OPERAND (expr, 0);
3158 
3159       if (TREE_CODE (expr) == ARRAY_REF
3160 	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
3161 	{
3162 	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
3163 	      || (TREE_OPERAND (expr, 2)
3164 		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
3165 	      || (TREE_OPERAND (expr, 3)
3166 		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
3167 	    {
3168 	      error ("invalid operands to %qs", code_name);
3169 	      debug_generic_stmt (expr);
3170 	      return true;
3171 	    }
3172 	}
3173 
3174       /* Verify if the reference array element types are compatible.  */
3175       if (TREE_CODE (expr) == ARRAY_REF
3176 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3177 					 TREE_TYPE (TREE_TYPE (op))))
3178 	{
3179 	  error ("type mismatch in %qs", code_name);
3180 	  debug_generic_stmt (TREE_TYPE (expr));
3181 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3182 	  return true;
3183 	}
3184       if (TREE_CODE (expr) == ARRAY_RANGE_REF
3185 	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3186 					 TREE_TYPE (TREE_TYPE (op))))
3187 	{
3188 	  error ("type mismatch in %qs", code_name);
3189 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3190 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3191 	  return true;
3192 	}
3193 
3194       if (TREE_CODE (expr) == COMPONENT_REF)
3195 	{
3196 	  if (TREE_OPERAND (expr, 2)
3197 	      && !is_gimple_val (TREE_OPERAND (expr, 2)))
3198 	    {
3199 	      error ("invalid %qs offset operator", code_name);
3200 	      return true;
3201 	    }
3202 	  if (!useless_type_conversion_p (TREE_TYPE (expr),
3203 					  TREE_TYPE (TREE_OPERAND (expr, 1))))
3204 	    {
3205 	      error ("type mismatch in %qs", code_name);
3206 	      debug_generic_stmt (TREE_TYPE (expr));
3207 	      debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3208 	      return true;
3209 	    }
3210 	}
3211 
3212       if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3213 	{
3214 	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3215 	     that their operand is not an SSA name or an invariant when
3216 	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
3217 	     bug).  Otherwise there is nothing to verify, gross mismatches at
3218 	     most invoke undefined behavior.  */
3219 	  if (require_lvalue
3220 	      && (TREE_CODE (op) == SSA_NAME
3221 		  || is_gimple_min_invariant (op)))
3222 	    {
3223 	      error ("conversion of %qs on the left hand side of %qs",
3224 		     get_tree_code_name (TREE_CODE (op)), code_name);
3225 	      debug_generic_stmt (expr);
3226 	      return true;
3227 	    }
3228 	  else if (TREE_CODE (op) == SSA_NAME
3229 		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3230 	    {
3231 	      error ("conversion of register to a different size in %qs",
3232 		     code_name);
3233 	      debug_generic_stmt (expr);
3234 	      return true;
3235 	    }
3236 	  else if (!handled_component_p (op))
3237 	    return false;
3238 	}
3239 
3240       expr = op;
3241     }
3242 
3243   code_name = get_tree_code_name (TREE_CODE (expr));
3244 
3245   if (TREE_CODE (expr) == MEM_REF)
3246     {
3247       if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3248 	  || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3249 	      && verify_address (TREE_OPERAND (expr, 0), false)))
3250 	{
3251 	  error ("invalid address operand in %qs", code_name);
3252 	  debug_generic_stmt (expr);
3253 	  return true;
3254 	}
3255       if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3256 	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3257 	{
3258 	  error ("invalid offset operand in %qs", code_name);
3259 	  debug_generic_stmt (expr);
3260 	  return true;
3261 	}
3262       if (MR_DEPENDENCE_CLIQUE (expr) != 0
3263 	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3264 	{
3265 	  error ("invalid clique in %qs", code_name);
3266 	  debug_generic_stmt (expr);
3267 	  return true;
3268 	}
3269     }
3270   else if (TREE_CODE (expr) == TARGET_MEM_REF)
3271     {
3272       if (!TMR_BASE (expr)
3273 	  || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3274 	  || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3275 	      && verify_address (TMR_BASE (expr), false)))
3276 	{
3277 	  error ("invalid address operand in %qs", code_name);
3278 	  return true;
3279 	}
3280       if (!TMR_OFFSET (expr)
3281 	  || !poly_int_tree_p (TMR_OFFSET (expr))
3282 	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3283 	{
3284 	  error ("invalid offset operand in %qs", code_name);
3285 	  debug_generic_stmt (expr);
3286 	  return true;
3287 	}
3288       if (MR_DEPENDENCE_CLIQUE (expr) != 0
3289 	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3290 	{
3291 	  error ("invalid clique in %qs", code_name);
3292 	  debug_generic_stmt (expr);
3293 	  return true;
3294 	}
3295     }
3296   else if (TREE_CODE (expr) == INDIRECT_REF)
3297     {
3298       error ("%qs in gimple IL", code_name);
3299       debug_generic_stmt (expr);
3300       return true;
3301     }
3302 
3303   return ((require_lvalue || !is_gimple_min_invariant (expr))
3304 	  && verify_types_in_gimple_min_lval (expr));
3305 }
3306 
3307 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3308    list of pointer-to types that is trivially convertible to DEST.  */
3309 
3310 static bool
3311 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3312 {
3313   tree src;
3314 
3315   if (!TYPE_POINTER_TO (src_obj))
3316     return true;
3317 
3318   for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3319     if (useless_type_conversion_p (dest, src))
3320       return true;
3321 
3322   return false;
3323 }
3324 
3325 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3326    from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
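
/* E.g. a conversion between a _Fract type and an integer or float
   type can be represented by FIXED_CONVERT_EXPR, whereas a pointer
   TYPE2 cannot.  */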
3327 
3328 static bool
3329 valid_fixed_convert_types_p (tree type1, tree type2)
3330 {
3331   return (FIXED_POINT_TYPE_P (type1)
3332 	  && (INTEGRAL_TYPE_P (type2)
3333 	      || SCALAR_FLOAT_TYPE_P (type2)
3334 	      || FIXED_POINT_TYPE_P (type2)));
3335 }
3336 
3337 /* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
3338    is a problem, otherwise false.  */
3339 
3340 static bool
3341 verify_gimple_call (gcall *stmt)
3342 {
3343   tree fn = gimple_call_fn (stmt);
3344   tree fntype, fndecl;
3345   unsigned i;
3346 
3347   if (gimple_call_internal_p (stmt))
3348     {
3349       if (fn)
3350 	{
3351 	  error ("gimple call has two targets");
3352 	  debug_generic_stmt (fn);
3353 	  return true;
3354 	}
3355     }
3356   else
3357     {
3358       if (!fn)
3359 	{
3360 	  error ("gimple call has no target");
3361 	  return true;
3362 	}
3363     }
3364 
3365   if (fn && !is_gimple_call_addr (fn))
3366     {
3367       error ("invalid function in gimple call");
3368       debug_generic_stmt (fn);
3369       return true;
3370     }
3371 
3372   if (fn
3373       && (!POINTER_TYPE_P (TREE_TYPE (fn))
3374 	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3375 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3376     {
3377       error ("non-function in gimple call");
3378       return true;
3379     }
3380 
3381    fndecl = gimple_call_fndecl (stmt);
3382    if (fndecl
3383        && TREE_CODE (fndecl) == FUNCTION_DECL
3384        && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3385        && !DECL_PURE_P (fndecl)
3386        && !TREE_READONLY (fndecl))
3387      {
3388        error ("invalid pure const state for function");
3389        return true;
3390      }
3391 
3392   tree lhs = gimple_call_lhs (stmt);
3393   if (lhs
3394       && (!is_gimple_lvalue (lhs)
3395 	  || verify_types_in_gimple_reference (lhs, true)))
3396     {
3397       error ("invalid LHS in gimple call");
3398       return true;
3399     }
3400 
3401   if (gimple_call_ctrl_altering_p (stmt)
3402       && gimple_call_noreturn_p (stmt)
3403       && should_remove_lhs_p (lhs))
3404     {
3405       error ("LHS in %<noreturn%> call");
3406       return true;
3407     }
3408 
3409   fntype = gimple_call_fntype (stmt);
3410   if (fntype
3411       && lhs
3412       && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3413       /* ???  At least C++ misses conversions at assignments from
3414 	 void * call results.
3415 	 For now simply allow arbitrary pointer type conversions.  */
3416       && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3417 	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
3418     {
3419       error ("invalid conversion in gimple call");
3420       debug_generic_stmt (TREE_TYPE (lhs));
3421       debug_generic_stmt (TREE_TYPE (fntype));
3422       return true;
3423     }
3424 
3425   if (gimple_call_chain (stmt)
3426       && !is_gimple_val (gimple_call_chain (stmt)))
3427     {
3428       error ("invalid static chain in gimple call");
3429       debug_generic_stmt (gimple_call_chain (stmt));
3430       return true;
3431     }
3432 
3433   /* If there is a static chain argument, the call should either be
3434      indirect, or the decl should have DECL_STATIC_CHAIN set.  */
3435   if (gimple_call_chain (stmt)
3436       && fndecl
3437       && !DECL_STATIC_CHAIN (fndecl))
3438     {
3439       error ("static chain with function that doesn%'t use one");
3440       return true;
3441     }
3442 
3443   if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3444     {
3445       switch (DECL_FUNCTION_CODE (fndecl))
3446 	{
3447 	case BUILT_IN_UNREACHABLE:
3448 	case BUILT_IN_TRAP:
3449 	  if (gimple_call_num_args (stmt) > 0)
3450 	    {
3451 	      /* A built-in unreachable with arguments might not be caught by
3452 		 the undefined behavior sanitizer.  Front ends check that users
3453 		 do not call them that way, but we also produce calls to
3454 		 __builtin_unreachable internally, for example when IPA figures
3455 		 out that a call cannot happen in a legal program.  In such
3456 		 cases, we must make sure arguments are stripped off.  */
3457 	      error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3458 		     "with arguments");
3459 	      return true;
3460 	    }
3461 	  break;
3462 	default:
3463 	  break;
3464 	}
3465     }
3466 
3467   /* ???  The C frontend passes unpromoted arguments in case it
3468      didn't see a function declaration before the call.  So for now
3469      leave the call arguments mostly unverified.  Once we gimplify
3470      unit-at-a-time we have a chance to fix this.  */
3471 
3472   for (i = 0; i < gimple_call_num_args (stmt); ++i)
3473     {
3474       tree arg = gimple_call_arg (stmt, i);
3475       if ((is_gimple_reg_type (TREE_TYPE (arg))
3476 	   && !is_gimple_val (arg))
3477 	  || (!is_gimple_reg_type (TREE_TYPE (arg))
3478 	      && !is_gimple_lvalue (arg)))
3479 	{
3480 	  error ("invalid argument to gimple call");
3481 	  debug_generic_expr (arg);
3482 	  return true;
3483 	}
3484     }
3485 
3486   return false;
3487 }
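
/* Examples of calls the checks above reject (hypothetical GIMPLE
   with made-up SSA names):

     x_1 = __builtin_unreachable ();  <-- LHS on a noreturn call
     __builtin_trap (i_2);            <-- BUILT_IN_TRAP with arguments
     foo (a[i_3]);                    <-- register-type argument that
                                          is not a GIMPLE value

   Each case triggers the corresponding error and makes the verifier
   return true.  */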
3488 
3489 /* Verifies the gimple comparison with the result type TYPE and
3490    the operands OP0 and OP1, comparison code is CODE.  */
3491 
3492 static bool
3493 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3494 {
3495   tree op0_type = TREE_TYPE (op0);
3496   tree op1_type = TREE_TYPE (op1);
3497 
3498   if (!is_gimple_val (op0) || !is_gimple_val (op1))
3499     {
3500       error ("invalid operands in gimple comparison");
3501       return true;
3502     }
3503 
3504   /* For comparisons we do not have the operation's type as the
3505      effective type the comparison is carried out in.  Instead
3506      we require that either the first operand is trivially
3507      convertible into the second, or the other way around.
3508      Because we special-case pointers to void we allow
3509      comparisons of pointers with the same mode as well.  */
3510   if (!useless_type_conversion_p (op0_type, op1_type)
3511       && !useless_type_conversion_p (op1_type, op0_type)
3512       && (!POINTER_TYPE_P (op0_type)
3513 	  || !POINTER_TYPE_P (op1_type)
3514 	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3515     {
3516       error ("mismatching comparison operand types");
3517       debug_generic_expr (op0_type);
3518       debug_generic_expr (op1_type);
3519       return true;
3520     }
3521 
3522   /* The resulting type of a comparison may be an effective boolean type.  */
3523   if (INTEGRAL_TYPE_P (type)
3524       && (TREE_CODE (type) == BOOLEAN_TYPE
3525 	  || TYPE_PRECISION (type) == 1))
3526     {
3527       if ((TREE_CODE (op0_type) == VECTOR_TYPE
3528 	   || TREE_CODE (op1_type) == VECTOR_TYPE)
3529 	  && code != EQ_EXPR && code != NE_EXPR
3530 	  && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3531 	  && !VECTOR_INTEGER_TYPE_P (op0_type))
3532 	{
3533 	  error ("unsupported operation or type for vector comparison"
3534 		 " returning a boolean");
3535 	  debug_generic_expr (op0_type);
3536 	  debug_generic_expr (op1_type);
3537 	  return true;
3538         }
3539     }
3540   /* Or a boolean vector type with the same element count
3541      as the comparison operand types.  */
3542   else if (TREE_CODE (type) == VECTOR_TYPE
3543 	   && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3544     {
3545       if (TREE_CODE (op0_type) != VECTOR_TYPE
3546 	  || TREE_CODE (op1_type) != VECTOR_TYPE)
3547         {
3548           error ("non-vector operands in vector comparison");
3549           debug_generic_expr (op0_type);
3550           debug_generic_expr (op1_type);
3551           return true;
3552         }
3553 
3554       if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3555 		    TYPE_VECTOR_SUBPARTS (op0_type)))
3556         {
3557           error ("invalid vector comparison resulting type");
3558           debug_generic_expr (type);
3559           return true;
3560         }
3561     }
3562   else
3563     {
3564       error ("bogus comparison result type");
3565       debug_generic_expr (type);
3566       return true;
3567     }
3568 
3569   return false;
3570 }
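
/* For illustration (hypothetical GIMPLE): with i_2 of type int and
   l_3 of type long,

     _1 = i_2 < l_3;

   is invalid because neither operand type is trivially convertible
   into the other; the gimplifier has to widen first, e.g.
   _4 = (long) i_2; _1 = _4 < l_3;.  Comparing a void * with an
   int * is fine, though, since both pointers share one mode.  */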
3571 
3572 /* Verify a gimple assignment statement STMT with an unary rhs.
3573    Returns true if anything is wrong.  */
3574 
3575 static bool
3576 verify_gimple_assign_unary (gassign *stmt)
3577 {
3578   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3579   tree lhs = gimple_assign_lhs (stmt);
3580   tree lhs_type = TREE_TYPE (lhs);
3581   tree rhs1 = gimple_assign_rhs1 (stmt);
3582   tree rhs1_type = TREE_TYPE (rhs1);
3583 
3584   if (!is_gimple_reg (lhs))
3585     {
3586       error ("non-register as LHS of unary operation");
3587       return true;
3588     }
3589 
3590   if (!is_gimple_val (rhs1))
3591     {
3592       error ("invalid operand in unary operation");
3593       return true;
3594     }
3595 
3596   const char* const code_name = get_tree_code_name (rhs_code);
3597 
3598   /* First handle conversions.  */
3599   switch (rhs_code)
3600     {
3601     CASE_CONVERT:
3602       {
3603 	/* Allow conversions between vectors with the same number of elements,
3604 	   provided that the conversion is OK for the element types too.  */
3605 	if (VECTOR_TYPE_P (lhs_type)
3606 	    && VECTOR_TYPE_P (rhs1_type)
3607 	    && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3608 			 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3609 	  {
3610 	    lhs_type = TREE_TYPE (lhs_type);
3611 	    rhs1_type = TREE_TYPE (rhs1_type);
3612 	  }
3613 	else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3614 	  {
3615 	    error ("invalid vector types in nop conversion");
3616 	    debug_generic_expr (lhs_type);
3617 	    debug_generic_expr (rhs1_type);
3618 	    return true;
3619 	  }
3620 
3621 	/* Allow conversions from pointer type to integral type only if
3622 	   there is no sign or zero extension involved.
3623 	   For targets where the precision of ptrofftype doesn't match that
3624 	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
3625 	if ((POINTER_TYPE_P (lhs_type)
3626 	     && INTEGRAL_TYPE_P (rhs1_type))
3627 	    || (POINTER_TYPE_P (rhs1_type)
3628 		&& INTEGRAL_TYPE_P (lhs_type)
3629 		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3630 		    || ptrofftype_p (lhs_type))))
3631 	  return false;
3632 
3633 	/* Allow conversion from integral to offset type and vice versa.  */
3634 	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3635 	     && INTEGRAL_TYPE_P (rhs1_type))
3636 	    || (INTEGRAL_TYPE_P (lhs_type)
3637 		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
3638 	  return false;
3639 
3640 	/* Otherwise assert we are converting between types of the
3641 	   same kind.  */
3642 	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3643 	  {
3644 	    error ("invalid types in nop conversion");
3645 	    debug_generic_expr (lhs_type);
3646 	    debug_generic_expr (rhs1_type);
3647 	    return true;
3648 	  }
3649 
3650 	return false;
3651       }
3652 
3653     case ADDR_SPACE_CONVERT_EXPR:
3654       {
3655 	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3656 	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3657 		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3658 	  {
3659 	    error ("invalid types in address space conversion");
3660 	    debug_generic_expr (lhs_type);
3661 	    debug_generic_expr (rhs1_type);
3662 	    return true;
3663 	  }
3664 
3665 	return false;
3666       }
3667 
3668     case FIXED_CONVERT_EXPR:
3669       {
3670 	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3671 	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3672 	  {
3673 	    error ("invalid types in fixed-point conversion");
3674 	    debug_generic_expr (lhs_type);
3675 	    debug_generic_expr (rhs1_type);
3676 	    return true;
3677 	  }
3678 
3679 	return false;
3680       }
3681 
3682     case FLOAT_EXPR:
3683       {
3684 	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3685 	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3686 	        || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3687 	  {
3688 	    error ("invalid types in conversion to floating-point");
3689 	    debug_generic_expr (lhs_type);
3690 	    debug_generic_expr (rhs1_type);
3691 	    return true;
3692 	  }
3693 
3694         return false;
3695       }
3696 
3697     case FIX_TRUNC_EXPR:
3698       {
3699         if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3700             && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3701                 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3702 	  {
3703 	    error ("invalid types in conversion to integer");
3704 	    debug_generic_expr (lhs_type);
3705 	    debug_generic_expr (rhs1_type);
3706 	    return true;
3707 	  }
3708 
3709         return false;
3710       }
3711 
3712     case VEC_UNPACK_HI_EXPR:
3713     case VEC_UNPACK_LO_EXPR:
3714     case VEC_UNPACK_FLOAT_HI_EXPR:
3715     case VEC_UNPACK_FLOAT_LO_EXPR:
3716     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3717     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3718       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3719           || TREE_CODE (lhs_type) != VECTOR_TYPE
3720           || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3721 	      && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3722           || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3723 	      && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3724 	  || ((rhs_code == VEC_UNPACK_HI_EXPR
3725 	       || rhs_code == VEC_UNPACK_LO_EXPR)
3726 	      && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3727 		  != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3728 	  || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3729 	       || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3730 	      && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3731 		  || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3732 	  || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3733 	       || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3734 	      && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3735 		  || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3736 	  || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3737 			2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3738 	      && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3739 		  || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3740 	  || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3741 		       TYPE_VECTOR_SUBPARTS (rhs1_type)))
3742 	{
3743 	  error ("type mismatch in %qs expression", code_name);
3744 	  debug_generic_expr (lhs_type);
3745 	  debug_generic_expr (rhs1_type);
3746 	  return true;
3747         }
3748 
3749       return false;
3750 
3751     case NEGATE_EXPR:
3752     case ABS_EXPR:
3753     case BIT_NOT_EXPR:
3754     case PAREN_EXPR:
3755     case CONJ_EXPR:
3756       break;
3757 
3758     case ABSU_EXPR:
3759       if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3760 	  || !TYPE_UNSIGNED (lhs_type)
3761 	  || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3762 	  || TYPE_UNSIGNED (rhs1_type)
3763 	  || element_precision (lhs_type) != element_precision (rhs1_type))
3764 	{
3765 	  error ("invalid types for %qs", code_name);
3766 	  debug_generic_expr (lhs_type);
3767 	  debug_generic_expr (rhs1_type);
3768 	  return true;
3769 	}
3770       return false;
3771 
3772     case VEC_DUPLICATE_EXPR:
3773       if (TREE_CODE (lhs_type) != VECTOR_TYPE
3774 	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3775 	{
3776 	  error ("%qs should be from a scalar to a like vector", code_name);
3777 	  debug_generic_expr (lhs_type);
3778 	  debug_generic_expr (rhs1_type);
3779 	  return true;
3780 	}
3781       return false;
3782 
3783     default:
3784       gcc_unreachable ();
3785     }
3786 
3787   /* For the remaining codes assert there is no conversion involved.  */
3788   if (!useless_type_conversion_p (lhs_type, rhs1_type))
3789     {
3790       error ("non-trivial conversion in unary operation");
3791       debug_generic_expr (lhs_type);
3792       debug_generic_expr (rhs1_type);
3793       return true;
3794     }
3795 
3796   return false;
3797 }
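
/* Examples for the conversion rules above (hypothetical GIMPLE on a
   target with 64-bit pointers):

     _1 = (sizetype) p_2;      <-- always OK, ptrofftype destination
     _3 = (unsigned int) p_2;  <-- OK, a pure truncation
     _4 = (__int128) p_2;      <-- rejected, it would hide a zero or
                                   sign extension of the pointer  */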
3798 
3799 /* Verify a gimple assignment statement STMT with a binary rhs.
3800    Returns true if anything is wrong.  */
3801 
3802 static bool
3803 verify_gimple_assign_binary (gassign *stmt)
3804 {
3805   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3806   tree lhs = gimple_assign_lhs (stmt);
3807   tree lhs_type = TREE_TYPE (lhs);
3808   tree rhs1 = gimple_assign_rhs1 (stmt);
3809   tree rhs1_type = TREE_TYPE (rhs1);
3810   tree rhs2 = gimple_assign_rhs2 (stmt);
3811   tree rhs2_type = TREE_TYPE (rhs2);
3812 
3813   if (!is_gimple_reg (lhs))
3814     {
3815       error ("non-register as LHS of binary operation");
3816       return true;
3817     }
3818 
3819   if (!is_gimple_val (rhs1)
3820       || !is_gimple_val (rhs2))
3821     {
3822       error ("invalid operands in binary operation");
3823       return true;
3824     }
3825 
3826   const char* const code_name = get_tree_code_name (rhs_code);
3827 
3828   /* First handle operations that involve different types.  */
3829   switch (rhs_code)
3830     {
3831     case COMPLEX_EXPR:
3832       {
3833 	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3834 	    || !(INTEGRAL_TYPE_P (rhs1_type)
3835 	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
3836 	    || !(INTEGRAL_TYPE_P (rhs2_type)
3837 	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3838 	  {
3839 	    error ("type mismatch in %qs", code_name);
3840 	    debug_generic_expr (lhs_type);
3841 	    debug_generic_expr (rhs1_type);
3842 	    debug_generic_expr (rhs2_type);
3843 	    return true;
3844 	  }
3845 
3846 	return false;
3847       }
3848 
3849     case LSHIFT_EXPR:
3850     case RSHIFT_EXPR:
3851     case LROTATE_EXPR:
3852     case RROTATE_EXPR:
3853       {
3854 	/* Shifts and rotates are ok on integral types, fixed point
3855 	   types and integer vector types.  */
3856 	if ((!INTEGRAL_TYPE_P (rhs1_type)
3857 	     && !FIXED_POINT_TYPE_P (rhs1_type)
3858 	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3859 		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3860 	    || (!INTEGRAL_TYPE_P (rhs2_type)
3861 		/* Vector shifts of vectors are also ok.  */
3862 		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3863 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3864 		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
3865 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3866 	    || !useless_type_conversion_p (lhs_type, rhs1_type))
3867 	  {
3868 	    error ("type mismatch in %qs", code_name);
3869 	    debug_generic_expr (lhs_type);
3870 	    debug_generic_expr (rhs1_type);
3871 	    debug_generic_expr (rhs2_type);
3872 	    return true;
3873 	  }
3874 
3875 	return false;
3876       }
3877 
3878     case WIDEN_LSHIFT_EXPR:
3879       {
3880         if (!INTEGRAL_TYPE_P (lhs_type)
3881             || !INTEGRAL_TYPE_P (rhs1_type)
3882             || TREE_CODE (rhs2) != INTEGER_CST
3883             || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3884           {
3885 	    error ("type mismatch in %qs", code_name);
3886             debug_generic_expr (lhs_type);
3887             debug_generic_expr (rhs1_type);
3888             debug_generic_expr (rhs2_type);
3889             return true;
3890           }
3891 
3892         return false;
3893       }
3894 
3895     case VEC_WIDEN_LSHIFT_HI_EXPR:
3896     case VEC_WIDEN_LSHIFT_LO_EXPR:
3897       {
3898         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3899             || TREE_CODE (lhs_type) != VECTOR_TYPE
3900             || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3901             || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3902             || TREE_CODE (rhs2) != INTEGER_CST
3903             || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3904                 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3905           {
3906 	    error ("type mismatch in %qs", code_name);
3907             debug_generic_expr (lhs_type);
3908             debug_generic_expr (rhs1_type);
3909             debug_generic_expr (rhs2_type);
3910             return true;
3911           }
3912 
3913         return false;
3914       }
3915 
3916     case PLUS_EXPR:
3917     case MINUS_EXPR:
3918       {
3919 	tree lhs_etype = lhs_type;
3920 	tree rhs1_etype = rhs1_type;
3921 	tree rhs2_etype = rhs2_type;
3922 	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3923 	  {
3924 	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3925 		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
3926 	      {
3927 		error ("invalid non-vector operands to %qs", code_name);
3928 		return true;
3929 	      }
3930 	    lhs_etype = TREE_TYPE (lhs_type);
3931 	    rhs1_etype = TREE_TYPE (rhs1_type);
3932 	    rhs2_etype = TREE_TYPE (rhs2_type);
3933 	  }
3934 	if (POINTER_TYPE_P (lhs_etype)
3935 	    || POINTER_TYPE_P (rhs1_etype)
3936 	    || POINTER_TYPE_P (rhs2_etype))
3937 	  {
3938 	    error ("invalid (pointer) operands %qs", code_name);
3939 	    return true;
3940 	  }
3941 
3942 	/* Continue with generic binary expression handling.  */
3943 	break;
3944       }
3945 
3946     case POINTER_PLUS_EXPR:
3947       {
3948 	if (!POINTER_TYPE_P (rhs1_type)
3949 	    || !useless_type_conversion_p (lhs_type, rhs1_type)
3950 	    || !ptrofftype_p (rhs2_type))
3951 	  {
3952 	    error ("type mismatch in %qs", code_name);
3953 	    debug_generic_stmt (lhs_type);
3954 	    debug_generic_stmt (rhs1_type);
3955 	    debug_generic_stmt (rhs2_type);
3956 	    return true;
3957 	  }
3958 
3959 	return false;
3960       }
3961 
3962     case POINTER_DIFF_EXPR:
3963       {
3964 	if (!POINTER_TYPE_P (rhs1_type)
3965 	    || !POINTER_TYPE_P (rhs2_type)
3966 	    /* Because we special-case pointers to void we allow difference
3967 	       of arbitrary pointers with the same mode.  */
3968 	    || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3969 	    || TREE_CODE (lhs_type) != INTEGER_TYPE
3970 	    || TYPE_UNSIGNED (lhs_type)
3971 	    || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3972 	  {
3973 	    error ("type mismatch in %qs", code_name);
3974 	    debug_generic_stmt (lhs_type);
3975 	    debug_generic_stmt (rhs1_type);
3976 	    debug_generic_stmt (rhs2_type);
3977 	    return true;
3978 	  }
3979 
3980 	return false;
3981       }
3982 
3983     case TRUTH_ANDIF_EXPR:
3984     case TRUTH_ORIF_EXPR:
3985     case TRUTH_AND_EXPR:
3986     case TRUTH_OR_EXPR:
3987     case TRUTH_XOR_EXPR:
3988 
3989       gcc_unreachable ();
3990 
3991     case LT_EXPR:
3992     case LE_EXPR:
3993     case GT_EXPR:
3994     case GE_EXPR:
3995     case EQ_EXPR:
3996     case NE_EXPR:
3997     case UNORDERED_EXPR:
3998     case ORDERED_EXPR:
3999     case UNLT_EXPR:
4000     case UNLE_EXPR:
4001     case UNGT_EXPR:
4002     case UNGE_EXPR:
4003     case UNEQ_EXPR:
4004     case LTGT_EXPR:
4005       /* Comparisons are also binary, but the result type is not
4006 	 connected to the operand types.  */
4007       return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4008 
4009     case WIDEN_MULT_EXPR:
4010       if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4011 	return true;
4012       return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4013 	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4014 
4015     case WIDEN_SUM_EXPR:
4016       {
4017         if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4018 	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
4019 	     && ((!INTEGRAL_TYPE_P (rhs1_type)
4020 		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4021 		 || (!INTEGRAL_TYPE_P (lhs_type)
4022 		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4023 	    || !useless_type_conversion_p (lhs_type, rhs2_type)
4024 	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4025 			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4026           {
4027 	    error ("type mismatch in %qs", code_name);
4028             debug_generic_expr (lhs_type);
4029             debug_generic_expr (rhs1_type);
4030             debug_generic_expr (rhs2_type);
4031             return true;
4032           }
4033         return false;
4034       }
4035 
4036     case VEC_WIDEN_MULT_HI_EXPR:
4037     case VEC_WIDEN_MULT_LO_EXPR:
4038     case VEC_WIDEN_MULT_EVEN_EXPR:
4039     case VEC_WIDEN_MULT_ODD_EXPR:
4040       {
4041         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4042             || TREE_CODE (lhs_type) != VECTOR_TYPE
4043 	    || !types_compatible_p (rhs1_type, rhs2_type)
4044 	    || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4045 			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4046           {
4047 	    error ("type mismatch in %qs", code_name);
4048             debug_generic_expr (lhs_type);
4049             debug_generic_expr (rhs1_type);
4050             debug_generic_expr (rhs2_type);
4051             return true;
4052           }
4053         return false;
4054       }
4055 
4056     case VEC_PACK_TRUNC_EXPR:
4057       /* ???  We currently use VEC_PACK_TRUNC_EXPR to simply concatenate
4058 	 vector boolean types.  */
4059       if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4060 	  && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4061 	  && types_compatible_p (rhs1_type, rhs2_type)
4062 	  && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4063 		       2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4064 	return false;
4065 
4066       /* Fallthru.  */
4067     case VEC_PACK_SAT_EXPR:
4068     case VEC_PACK_FIX_TRUNC_EXPR:
4069       {
4070         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4071             || TREE_CODE (lhs_type) != VECTOR_TYPE
4072             || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4073 		  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4074 		  && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4075 		 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4076 		     == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4077 	    || !types_compatible_p (rhs1_type, rhs2_type)
4078 	    || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4079 			 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4080 	    || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4081 			 TYPE_VECTOR_SUBPARTS (lhs_type)))
4082           {
4083 	    error ("type mismatch in %qs", code_name);
4084             debug_generic_expr (lhs_type);
4085             debug_generic_expr (rhs1_type);
4086             debug_generic_expr (rhs2_type);
4087             return true;
4088           }
4089 
4090         return false;
4091       }
4092 
4093     case VEC_PACK_FLOAT_EXPR:
4094       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4095 	  || TREE_CODE (lhs_type) != VECTOR_TYPE
4096 	  || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4097 	  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4098 	  || !types_compatible_p (rhs1_type, rhs2_type)
4099 	  || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4100 		       2 * GET_MODE_SIZE (element_mode (lhs_type)))
4101 	  || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4102 		       TYPE_VECTOR_SUBPARTS (lhs_type)))
4103 	{
4104 	  error ("type mismatch in %qs", code_name);
4105 	  debug_generic_expr (lhs_type);
4106 	  debug_generic_expr (rhs1_type);
4107 	  debug_generic_expr (rhs2_type);
4108 	  return true;
4109 	}
4110 
4111       return false;
4112 
4113     case MULT_EXPR:
4114     case MULT_HIGHPART_EXPR:
4115     case TRUNC_DIV_EXPR:
4116     case CEIL_DIV_EXPR:
4117     case FLOOR_DIV_EXPR:
4118     case ROUND_DIV_EXPR:
4119     case TRUNC_MOD_EXPR:
4120     case CEIL_MOD_EXPR:
4121     case FLOOR_MOD_EXPR:
4122     case ROUND_MOD_EXPR:
4123     case RDIV_EXPR:
4124     case EXACT_DIV_EXPR:
4125     case MIN_EXPR:
4126     case MAX_EXPR:
4127     case BIT_IOR_EXPR:
4128     case BIT_XOR_EXPR:
4129     case BIT_AND_EXPR:
4130       /* Continue with generic binary expression handling.  */
4131       break;
4132 
4133     case VEC_SERIES_EXPR:
4134       if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4135 	{
4136 	  error ("type mismatch in %qs", code_name);
4137 	  debug_generic_expr (rhs1_type);
4138 	  debug_generic_expr (rhs2_type);
4139 	  return true;
4140 	}
4141       if (TREE_CODE (lhs_type) != VECTOR_TYPE
4142 	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4143 	{
4144 	  error ("vector type expected in %qs", code_name);
4145 	  debug_generic_expr (lhs_type);
4146 	  return true;
4147 	}
4148       return false;
4149 
4150     default:
4151       gcc_unreachable ();
4152     }
4153 
4154   if (!useless_type_conversion_p (lhs_type, rhs1_type)
4155       || !useless_type_conversion_p (lhs_type, rhs2_type))
4156     {
4157       error ("type mismatch in binary expression");
4158       debug_generic_stmt (lhs_type);
4159       debug_generic_stmt (rhs1_type);
4160       debug_generic_stmt (rhs2_type);
4161       return true;
4162     }
4163 
4164   return false;
4165 }
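
/* Examples for the pointer rules above (hypothetical GIMPLE):

     _1 = p_2 + 4;    <-- must be POINTER_PLUS_EXPR; the offset
                          operand has to be of ptrofftype (sizetype)
     _3 = p_4 - q_5;  <-- must be POINTER_DIFF_EXPR; both operands
                          are pointers of one mode and the LHS is a
                          signed integer of matching precision
     _6 = p_4 + q_5;  <-- rejected; PLUS_EXPR never operates on
                          pointers in GIMPLE  */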
4166 
4167 /* Verify a gimple assignment statement STMT with a ternary rhs.
4168    Returns true if anything is wrong.  */
4169 
4170 static bool
4171 verify_gimple_assign_ternary (gassign *stmt)
4172 {
4173   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4174   tree lhs = gimple_assign_lhs (stmt);
4175   tree lhs_type = TREE_TYPE (lhs);
4176   tree rhs1 = gimple_assign_rhs1 (stmt);
4177   tree rhs1_type = TREE_TYPE (rhs1);
4178   tree rhs2 = gimple_assign_rhs2 (stmt);
4179   tree rhs2_type = TREE_TYPE (rhs2);
4180   tree rhs3 = gimple_assign_rhs3 (stmt);
4181   tree rhs3_type = TREE_TYPE (rhs3);
4182 
4183   if (!is_gimple_reg (lhs))
4184     {
4185       error ("non-register as LHS of ternary operation");
4186       return true;
4187     }
4188 
4189   if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4190        ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4191       || !is_gimple_val (rhs2)
4192       || !is_gimple_val (rhs3))
4193     {
4194       error ("invalid operands in ternary operation");
4195       return true;
4196     }
4197 
4198   const char* const code_name = get_tree_code_name (rhs_code);
4199 
4200   /* First handle operations that involve different types.  */
4201   switch (rhs_code)
4202     {
4203     case WIDEN_MULT_PLUS_EXPR:
4204     case WIDEN_MULT_MINUS_EXPR:
4205       if ((!INTEGRAL_TYPE_P (rhs1_type)
4206 	   && !FIXED_POINT_TYPE_P (rhs1_type))
4207 	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
4208 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4209 	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4210 	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4211 	{
4212 	  error ("type mismatch in %qs", code_name);
4213 	  debug_generic_expr (lhs_type);
4214 	  debug_generic_expr (rhs1_type);
4215 	  debug_generic_expr (rhs2_type);
4216 	  debug_generic_expr (rhs3_type);
4217 	  return true;
4218 	}
4219       break;
4220 
4221     case VEC_COND_EXPR:
4222       if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4223 	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4224 		       TYPE_VECTOR_SUBPARTS (lhs_type)))
4225 	{
4226 	  error ("the first argument of a %qs must be of a "
4227 		 "boolean vector type of the same number of elements "
4228 		 "as the result", code_name);
4229 	  debug_generic_expr (lhs_type);
4230 	  debug_generic_expr (rhs1_type);
4231 	  return true;
4232 	}
4233       /* Fallthrough.  */
4234     case COND_EXPR:
4235       if (!is_gimple_val (rhs1)
4236 	  && verify_gimple_comparison (TREE_TYPE (rhs1),
4237 				       TREE_OPERAND (rhs1, 0),
4238 				       TREE_OPERAND (rhs1, 1),
4239 				       TREE_CODE (rhs1)))
4240 	return true;
4241       if (!useless_type_conversion_p (lhs_type, rhs2_type)
4242 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
4243 	{
4244 	  error ("type mismatch in %qs", code_name);
4245 	  debug_generic_expr (lhs_type);
4246 	  debug_generic_expr (rhs2_type);
4247 	  debug_generic_expr (rhs3_type);
4248 	  return true;
4249 	}
4250       break;
4251 
4252     case VEC_PERM_EXPR:
4253       if (!useless_type_conversion_p (lhs_type, rhs1_type)
4254 	  || !useless_type_conversion_p (lhs_type, rhs2_type))
4255 	{
4256 	  error ("type mismatch in %qs", code_name);
4257 	  debug_generic_expr (lhs_type);
4258 	  debug_generic_expr (rhs1_type);
4259 	  debug_generic_expr (rhs2_type);
4260 	  debug_generic_expr (rhs3_type);
4261 	  return true;
4262 	}
4263 
4264       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4265 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4266 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4267 	{
4268 	  error ("vector types expected in %qs", code_name);
4269 	  debug_generic_expr (lhs_type);
4270 	  debug_generic_expr (rhs1_type);
4271 	  debug_generic_expr (rhs2_type);
4272 	  debug_generic_expr (rhs3_type);
4273 	  return true;
4274 	}
4275 
4276       if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4277 		    TYPE_VECTOR_SUBPARTS (rhs2_type))
4278 	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4279 		       TYPE_VECTOR_SUBPARTS (rhs3_type))
4280 	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4281 		       TYPE_VECTOR_SUBPARTS (lhs_type)))
4282 	{
4283 	  error ("vectors with different element number found in %qs",
4284 		 code_name);
4285 	  debug_generic_expr (lhs_type);
4286 	  debug_generic_expr (rhs1_type);
4287 	  debug_generic_expr (rhs2_type);
4288 	  debug_generic_expr (rhs3_type);
4289 	  return true;
4290 	}
4291 
4292       if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4293 	  || (TREE_CODE (rhs3) != VECTOR_CST
4294 	      && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4295 				    (TREE_TYPE (rhs3_type)))
4296 		  != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4297 				       (TREE_TYPE (rhs1_type))))))
4298 	{
4299 	  error ("invalid mask type in %qs", code_name);
4300 	  debug_generic_expr (lhs_type);
4301 	  debug_generic_expr (rhs1_type);
4302 	  debug_generic_expr (rhs2_type);
4303 	  debug_generic_expr (rhs3_type);
4304 	  return true;
4305 	}
4306 
4307       return false;
4308 
4309     case SAD_EXPR:
4310       if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4311 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4312 	  || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4313 	       > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4314 	{
4315 	  error ("type mismatch in %qs", code_name);
4316 	  debug_generic_expr (lhs_type);
4317 	  debug_generic_expr (rhs1_type);
4318 	  debug_generic_expr (rhs2_type);
4319 	  debug_generic_expr (rhs3_type);
4320 	  return true;
4321 	}
4322 
4323       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4324 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4325 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4326 	{
4327 	  error ("vector types expected in %qs", code_name);
4328 	  debug_generic_expr (lhs_type);
4329 	  debug_generic_expr (rhs1_type);
4330 	  debug_generic_expr (rhs2_type);
4331 	  debug_generic_expr (rhs3_type);
4332 	  return true;
4333 	}
4334 
4335       return false;
4336 
4337     case BIT_INSERT_EXPR:
4338       if (! useless_type_conversion_p (lhs_type, rhs1_type))
4339 	{
4340 	  error ("type mismatch in %qs", code_name);
4341 	  debug_generic_expr (lhs_type);
4342 	  debug_generic_expr (rhs1_type);
4343 	  return true;
4344 	}
4345       if (! ((INTEGRAL_TYPE_P (rhs1_type)
4346 	      && INTEGRAL_TYPE_P (rhs2_type))
4347 	     /* Vector element insert.  */
4348 	     || (VECTOR_TYPE_P (rhs1_type)
4349 		 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4350 	     /* Aligned sub-vector insert.  */
4351 	     || (VECTOR_TYPE_P (rhs1_type)
4352 		 && VECTOR_TYPE_P (rhs2_type)
4353 		 && types_compatible_p (TREE_TYPE (rhs1_type),
4354 					TREE_TYPE (rhs2_type))
4355 		 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4356 				TYPE_VECTOR_SUBPARTS (rhs2_type))
4357 		 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
4358 	{
4359 	  error ("not allowed type combination in %qs", code_name);
4360 	  debug_generic_expr (rhs1_type);
4361 	  debug_generic_expr (rhs2_type);
4362 	  return true;
4363 	}
4364       if (! tree_fits_uhwi_p (rhs3)
4365 	  || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4366 	  || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4367 	{
4368 	  error ("invalid position or size in %qs", code_name);
4369 	  return true;
4370 	}
4371       if (INTEGRAL_TYPE_P (rhs1_type)
4372 	  && !type_has_mode_precision_p (rhs1_type))
4373 	{
4374 	  error ("%qs into non-mode-precision operand", code_name);
4375 	  return true;
4376 	}
4377       if (INTEGRAL_TYPE_P (rhs1_type))
4378 	{
4379 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4380 	  if (bitpos >= TYPE_PRECISION (rhs1_type)
4381 	      || (bitpos + TYPE_PRECISION (rhs2_type)
4382 		  > TYPE_PRECISION (rhs1_type)))
4383 	    {
4384 	      error ("insertion out of range in %qs", code_name);
4385 	      return true;
4386 	    }
4387 	}
4388       else if (VECTOR_TYPE_P (rhs1_type))
4389 	{
4390 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4391 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4392 	  if (bitpos % bitsize != 0)
4393 	    {
4394 	      error ("%qs not at element boundary", code_name);
4395 	      return true;
4396 	    }
4397 	}
4398       return false;
4399 
4400     case DOT_PROD_EXPR:
4401       {
4402         if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4403 	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
4404 	     && ((!INTEGRAL_TYPE_P (rhs1_type)
4405 		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4406 		 || (!INTEGRAL_TYPE_P (lhs_type)
4407 		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4408 	    || !types_compatible_p (rhs1_type, rhs2_type)
4409 	    || !useless_type_conversion_p (lhs_type, rhs3_type)
4410 	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4411 			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4412           {
4413 	    error ("type mismatch in %qs", code_name);
4414             debug_generic_expr (lhs_type);
4415             debug_generic_expr (rhs1_type);
4416             debug_generic_expr (rhs2_type);
4417             return true;
4418           }
4419         return false;
4420       }
4421 
4422     case REALIGN_LOAD_EXPR:
4423       /* FIXME.  */
4424       return false;
4425 
4426     default:
4427       gcc_unreachable ();
4428     }
4429   return false;
4430 }
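
/* Example for the VEC_COND_EXPR check above (hypothetical types):
   with v4si a four-element int vector and v4bi the corresponding
   boolean vector type,

     _1 = VEC_COND_EXPR <m_2, a_3, b_4>;

   requires m_2 to be of type v4bi, matching the element count of the
   v4si result; a scalar condition value would be rejected here.  */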
4431 
4432 /* Verify a gimple assignment statement STMT with a single rhs.
4433    Returns true if anything is wrong.  */
4434 
4435 static bool
4436 verify_gimple_assign_single (gassign *stmt)
4437 {
4438   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4439   tree lhs = gimple_assign_lhs (stmt);
4440   tree lhs_type = TREE_TYPE (lhs);
4441   tree rhs1 = gimple_assign_rhs1 (stmt);
4442   tree rhs1_type = TREE_TYPE (rhs1);
4443   bool res = false;
4444 
4445   const char* const code_name = get_tree_code_name (rhs_code);
4446 
4447   if (!useless_type_conversion_p (lhs_type, rhs1_type))
4448     {
4449       error ("non-trivial conversion in %qs", code_name);
4450       debug_generic_expr (lhs_type);
4451       debug_generic_expr (rhs1_type);
4452       return true;
4453     }
4454 
4455   if (gimple_clobber_p (stmt)
4456       && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4457     {
4458       error ("%qs LHS in clobber statement",
4459 	     get_tree_code_name (TREE_CODE (lhs)));
4460       debug_generic_expr (lhs);
4461       return true;
4462     }
4463 
4464   if (handled_component_p (lhs)
4465       || TREE_CODE (lhs) == MEM_REF
4466       || TREE_CODE (lhs) == TARGET_MEM_REF)
4467     res |= verify_types_in_gimple_reference (lhs, true);
4468 
4469   /* Special codes we cannot handle via their class.  */
4470   switch (rhs_code)
4471     {
4472     case ADDR_EXPR:
4473       {
4474 	tree op = TREE_OPERAND (rhs1, 0);
4475 	if (!is_gimple_addressable (op))
4476 	  {
4477 	    error ("invalid operand in %qs", code_name);
4478 	    return true;
4479 	  }
4480 
4481 	/* Technically there is no longer a need for matching types, but
4482 	   gimple hygiene asks for this check.  In LTO we can end up
4483 	   combining incompatible units and thus get addresses of globals
4484 	   that change their type to a common one.  */
4485 	if (!in_lto_p
4486 	    && !types_compatible_p (TREE_TYPE (op),
4487 				    TREE_TYPE (TREE_TYPE (rhs1)))
4488 	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4489 							  TREE_TYPE (op)))
4490 	  {
4491 	    error ("type mismatch in %qs", code_name);
4492 	    debug_generic_stmt (TREE_TYPE (rhs1));
4493 	    debug_generic_stmt (TREE_TYPE (op));
4494 	    return true;
4495 	  }
4496 
4497 	return (verify_address (rhs1, true)
4498 		|| verify_types_in_gimple_reference (op, true));
4499       }
4500 
4501     /* tcc_reference  */
4502     case INDIRECT_REF:
4503       error ("%qs in gimple IL", code_name);
4504       return true;
4505 
4506     case COMPONENT_REF:
4507     case BIT_FIELD_REF:
4508     case ARRAY_REF:
4509     case ARRAY_RANGE_REF:
4510     case VIEW_CONVERT_EXPR:
4511     case REALPART_EXPR:
4512     case IMAGPART_EXPR:
4513     case TARGET_MEM_REF:
4514     case MEM_REF:
4515       if (!is_gimple_reg (lhs)
4516 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4517 	{
4518 	  error ("invalid RHS for gimple memory store: %qs", code_name);
4519 	  debug_generic_stmt (lhs);
4520 	  debug_generic_stmt (rhs1);
4521 	  return true;
4522 	}
4523       return res || verify_types_in_gimple_reference (rhs1, false);
4524 
4525     /* tcc_constant  */
4526     case SSA_NAME:
4527     case INTEGER_CST:
4528     case REAL_CST:
4529     case FIXED_CST:
4530     case COMPLEX_CST:
4531     case VECTOR_CST:
4532     case STRING_CST:
4533       return res;
4534 
4535     /* tcc_declaration  */
4536     case CONST_DECL:
4537       return res;
4538     case VAR_DECL:
4539     case PARM_DECL:
4540       if (!is_gimple_reg (lhs)
4541 	  && !is_gimple_reg (rhs1)
4542 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4543 	{
4544 	  error ("invalid RHS for gimple memory store: %qs", code_name);
4545 	  debug_generic_stmt (lhs);
4546 	  debug_generic_stmt (rhs1);
4547 	  return true;
4548 	}
4549       return res;
4550 
4551     case CONSTRUCTOR:
4552       if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4553 	{
4554 	  unsigned int i;
4555 	  tree elt_i, elt_v, elt_t = NULL_TREE;
4556 
4557 	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
4558 	    return res;
4559 	  /* For vector CONSTRUCTORs we require that either it is an empty
4560 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4561 	     (then the element count must be correct to cover the whole
4562 	     outer vector and the index must be NULL on all elements), or
4563 	     it is a CONSTRUCTOR of scalar elements, where as an exception
4564 	     we allow a smaller number of elements (assuming zero filling)
4565 	     and consecutive indexes as compared to NULL indexes (such
4566 	     CONSTRUCTORs can appear in the IL from FEs).  */
4567 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4568 	    {
4569 	      if (elt_t == NULL_TREE)
4570 		{
4571 		  elt_t = TREE_TYPE (elt_v);
4572 		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
4573 		    {
4574 		      tree elt_t = TREE_TYPE (elt_v);
4575 		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4576 						      TREE_TYPE (elt_t)))
4577 			{
4578 			  error ("incorrect type of vector %qs elements",
4579 				 code_name);
4580 			  debug_generic_stmt (rhs1);
4581 			  return true;
4582 			}
4583 		      else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4584 					 * TYPE_VECTOR_SUBPARTS (elt_t),
4585 					 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4586 			{
4587 			  error ("incorrect number of vector %qs elements",
4588 				 code_name);
4589 			  debug_generic_stmt (rhs1);
4590 			  return true;
4591 			}
4592 		    }
4593 		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4594 						       elt_t))
4595 		    {
4596 		      error ("incorrect type of vector %qs elements",
4597 			     code_name);
4598 		      debug_generic_stmt (rhs1);
4599 		      return true;
4600 		    }
4601 		  else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4602 				     TYPE_VECTOR_SUBPARTS (rhs1_type)))
4603 		    {
4604 		      error ("incorrect number of vector %qs elements",
4605 			     code_name);
4606 		      debug_generic_stmt (rhs1);
4607 		      return true;
4608 		    }
4609 		}
4610 	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4611 		{
4612 		  error ("incorrect type of vector CONSTRUCTOR elements");
4613 		  debug_generic_stmt (rhs1);
4614 		  return true;
4615 		}
4616 	      if (elt_i != NULL_TREE
4617 		  && (TREE_CODE (elt_t) == VECTOR_TYPE
4618 		      || TREE_CODE (elt_i) != INTEGER_CST
4619 		      || compare_tree_int (elt_i, i) != 0))
4620 		{
4621 		  error ("vector %qs with non-NULL element index",
4622 			 code_name);
4623 		  debug_generic_stmt (rhs1);
4624 		  return true;
4625 		}
4626 	      if (!is_gimple_val (elt_v))
4627 		{
4628 		  error ("vector %qs element is not a GIMPLE value",
4629 			 code_name);
4630 		  debug_generic_stmt (rhs1);
4631 		  return true;
4632 		}
4633 	    }
4634 	}
4635       else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4636 	{
4637 	  error ("non-vector %qs with elements", code_name);
4638 	  debug_generic_stmt (rhs1);
4639 	  return true;
4640 	}
4641       return res;
4642 
4643     case ASSERT_EXPR:
4644       /* FIXME.  */
4645       rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4646       if (rhs1 == boolean_false_node)
4647 	{
4648 	  error ("%qs with an always-false condition", code_name);
4649 	  debug_generic_stmt (rhs1);
4650 	  return true;
4651 	}
4652       break;
4653 
4654     case OBJ_TYPE_REF:
4655     case WITH_SIZE_EXPR:
4656       /* FIXME.  */
4657       return res;
4658 
4659     default:;
4660     }
4661 
4662   return res;
4663 }
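
/* Examples for the vector CONSTRUCTOR forms accepted above
   (hypothetical types, v4si being a four-element int vector):

     x_1 = {};                    <-- empty, implies zero filling
     x_2 = {a_3, b_4, c_5, d_6};  <-- four scalars with NULL or
                                      consecutive indexes
     x_7 = {lo_8, hi_9};          <-- two v2si halves; the element
                                      count must cover the whole
                                      outer vector  */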
4664 
4665 /* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
4666    is a problem, otherwise false.  */
4667 
4668 static bool
4669 verify_gimple_assign (gassign *stmt)
4670 {
4671   switch (gimple_assign_rhs_class (stmt))
4672     {
4673     case GIMPLE_SINGLE_RHS:
4674       return verify_gimple_assign_single (stmt);
4675 
4676     case GIMPLE_UNARY_RHS:
4677       return verify_gimple_assign_unary (stmt);
4678 
4679     case GIMPLE_BINARY_RHS:
4680       return verify_gimple_assign_binary (stmt);
4681 
4682     case GIMPLE_TERNARY_RHS:
4683       return verify_gimple_assign_ternary (stmt);
4684 
4685     default:
4686       gcc_unreachable ();
4687     }
4688 }
4689 
4690 /* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
4691    is a problem, otherwise false.  */
4692 
4693 static bool
4694 verify_gimple_return (greturn *stmt)
4695 {
4696   tree op = gimple_return_retval (stmt);
4697   tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4698 
4699   /* We cannot test for present return values as we do not fix up missing
4700      return values from the original source.  */
4701   if (op == NULL)
4702     return false;
4703 
4704   if (!is_gimple_val (op)
4705       && TREE_CODE (op) != RESULT_DECL)
4706     {
4707       error ("invalid operand in return statement");
4708       debug_generic_stmt (op);
4709       return true;
4710     }
4711 
4712   if ((TREE_CODE (op) == RESULT_DECL
4713        && DECL_BY_REFERENCE (op))
4714       || (TREE_CODE (op) == SSA_NAME
4715 	  && SSA_NAME_VAR (op)
4716 	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4717 	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4718     op = TREE_TYPE (op);
4719 
4720   if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4721     {
4722       error ("invalid conversion in return statement");
4723       debug_generic_stmt (restype);
4724       debug_generic_stmt (TREE_TYPE (op));
4725       return true;
4726     }
4727 
4728   return false;
4729 }
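
/* Example (hypothetical): for a function that returns a large
   aggregate by invisible reference, the RESULT_DECL has
   DECL_BY_REFERENCE set and the value returned is really a pointer,
   so the conversion check above is done against the pointed-to type
   instead.  */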
4730 
4731 
4732 /* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
4733    is a problem, otherwise false.  */
4734 
4735 static bool
4736 verify_gimple_goto (ggoto *stmt)
4737 {
4738   tree dest = gimple_goto_dest (stmt);
4739 
4740   /* ???  We have two canonical forms of direct goto destinations, a
4741      bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
4742   if (TREE_CODE (dest) != LABEL_DECL
4743       && (!is_gimple_val (dest)
4744 	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
4745     {
4746       error ("goto destination is neither a label nor a pointer");
4747       return true;
4748     }
4749 
4750   return false;
4751 }
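
/* Example (hypothetical GIMPLE): both destination forms pass the
   check above:

     goto lab;  <-- direct goto, a bare LABEL_DECL
     goto _1;   <-- computed goto, e.g. after _1 = &&lab;  */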
4752 
4753 /* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
4754    is a problem, otherwise false.  */
4755 
4756 static bool
4757 verify_gimple_switch (gswitch *stmt)
4758 {
4759   unsigned int i, n;
4760   tree elt, prev_upper_bound = NULL_TREE;
4761   tree index_type, elt_type = NULL_TREE;
4762 
4763   if (!is_gimple_val (gimple_switch_index (stmt)))
4764     {
4765       error ("invalid operand to switch statement");
4766       debug_generic_stmt (gimple_switch_index (stmt));
4767       return true;
4768     }
4769 
4770   index_type = TREE_TYPE (gimple_switch_index (stmt));
4771   if (! INTEGRAL_TYPE_P (index_type))
4772     {
4773       error ("non-integral type switch statement");
4774       debug_generic_expr (index_type);
4775       return true;
4776     }
4777 
4778   elt = gimple_switch_label (stmt, 0);
4779   if (CASE_LOW (elt) != NULL_TREE
4780       || CASE_HIGH (elt) != NULL_TREE
4781       || CASE_CHAIN (elt) != NULL_TREE)
4782     {
4783       error ("invalid default case label in switch statement");
4784       debug_generic_expr (elt);
4785       return true;
4786     }
4787 
4788   n = gimple_switch_num_labels (stmt);
4789   for (i = 1; i < n; i++)
4790     {
4791       elt = gimple_switch_label (stmt, i);
4792 
4793       if (CASE_CHAIN (elt))
4794 	{
4795 	  error ("invalid %<CASE_CHAIN%>");
4796 	  debug_generic_expr (elt);
4797 	  return true;
4798 	}
4799       if (! CASE_LOW (elt))
4800 	{
4801 	  error ("invalid case label in switch statement");
4802 	  debug_generic_expr (elt);
4803 	  return true;
4804 	}
4805       if (CASE_HIGH (elt)
4806 	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4807 	{
4808 	  error ("invalid case range in switch statement");
4809 	  debug_generic_expr (elt);
4810 	  return true;
4811 	}
4812 
4813       if (elt_type)
4814 	{
4815 	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4816 	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4817 	    {
4818 	      error ("type mismatch for case label in switch statement");
4819 	      debug_generic_expr (elt);
4820 	      return true;
4821 	    }
4822 	}
4823       else
4824 	{
4825 	  elt_type = TREE_TYPE (CASE_LOW (elt));
4826 	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4827 	    {
4828 	      error ("type precision mismatch in switch statement");
4829 	      return true;
4830 	    }
4831 	}
4832 
4833       if (prev_upper_bound)
4834 	{
4835 	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4836 	    {
4837 	      error ("case labels not sorted in switch statement");
4838 	      return true;
4839 	    }
4840 	}
4841 
4842       prev_upper_bound = CASE_HIGH (elt);
4843       if (! prev_upper_bound)
4844 	prev_upper_bound = CASE_LOW (elt);
4845     }
4846 
4847   return false;
4848 }
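
/* Example of a switch the checks above accept (hypothetical GIMPLE
   dump syntax):

     switch (i_1) <default: L0, case 1: L1, case 3 ... 7: L2>

   The first label must be the default (no CASE_LOW), the remaining
   labels must share one type, be sorted ascending by CASE_LOW, and
   each CASE_LOW must lie above the previous upper bound.  */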
4849 
4850 /* Verify a gimple debug statement STMT.
4851    Returns true if anything is wrong.  */
4852 
4853 static bool
4854 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4855 {
4856   /* There isn't much that could be wrong in a gimple debug stmt.  A
4857      gimple debug bind stmt, for example, maps a tree that is usually
4858      a VAR_DECL or a PARM_DECL, but may also be some scalarized
4859      component or member of an aggregate type, to another tree that
4860      can be an arbitrary expression.  These stmts expand into debug
4861      insns, and are converted to debug notes by var-tracking.c.  */
4862   return false;
4863 }
4864 
4865 /* Verify a gimple label statement STMT.
4866    Returns true if anything is wrong.  */
4867 
4868 static bool
4869 verify_gimple_label (glabel *stmt)
4870 {
4871   tree decl = gimple_label_label (stmt);
4872   int uid;
4873   bool err = false;
4874 
4875   if (TREE_CODE (decl) != LABEL_DECL)
4876     return true;
4877   if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4878       && DECL_CONTEXT (decl) != current_function_decl)
4879     {
4880       error ("label context is not the current function declaration");
4881       err |= true;
4882     }
4883 
4884   uid = LABEL_DECL_UID (decl);
4885   if (cfun->cfg
4886       && (uid == -1
4887 	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4888     {
4889       error ("incorrect entry in %<label_to_block_map%>");
4890       err |= true;
4891     }
4892 
4893   uid = EH_LANDING_PAD_NR (decl);
4894   if (uid)
4895     {
4896       eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4897       if (decl != lp->post_landing_pad)
4898 	{
4899 	  error ("incorrect setting of landing pad number");
4900 	  err |= true;
4901 	}
4902     }
4903 
4904   return err;
4905 }
4906 
4907 /* Verify a gimple cond statement STMT.
4908    Returns true if anything is wrong.  */
4909 
4910 static bool
4911 verify_gimple_cond (gcond *stmt)
4912 {
4913   if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4914     {
4915       error ("invalid comparison code in gimple cond");
4916       return true;
4917     }
4918   if (!(!gimple_cond_true_label (stmt)
4919 	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4920       || !(!gimple_cond_false_label (stmt)
4921 	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4922     {
4923       error ("invalid labels in gimple cond");
4924       return true;
4925     }
4926 
4927   return verify_gimple_comparison (boolean_type_node,
4928 				   gimple_cond_lhs (stmt),
4929 				   gimple_cond_rhs (stmt),
4930 				   gimple_cond_code (stmt));
4931 }
4932 
4933 /* Verify the GIMPLE statement STMT.  Returns true if there is an
4934    error, otherwise false.  */
4935 
4936 static bool
4937 verify_gimple_stmt (gimple *stmt)
4938 {
4939   switch (gimple_code (stmt))
4940     {
4941     case GIMPLE_ASSIGN:
4942       return verify_gimple_assign (as_a <gassign *> (stmt));
4943 
4944     case GIMPLE_LABEL:
4945       return verify_gimple_label (as_a <glabel *> (stmt));
4946 
4947     case GIMPLE_CALL:
4948       return verify_gimple_call (as_a <gcall *> (stmt));
4949 
4950     case GIMPLE_COND:
4951       return verify_gimple_cond (as_a <gcond *> (stmt));
4952 
4953     case GIMPLE_GOTO:
4954       return verify_gimple_goto (as_a <ggoto *> (stmt));
4955 
4956     case GIMPLE_SWITCH:
4957       return verify_gimple_switch (as_a <gswitch *> (stmt));
4958 
4959     case GIMPLE_RETURN:
4960       return verify_gimple_return (as_a <greturn *> (stmt));
4961 
4962     case GIMPLE_ASM:
4963       return false;
4964 
4965     case GIMPLE_TRANSACTION:
4966       return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4967 
4968     /* Tuples that do not have tree operands.  */
4969     case GIMPLE_NOP:
4970     case GIMPLE_PREDICT:
4971     case GIMPLE_RESX:
4972     case GIMPLE_EH_DISPATCH:
4973     case GIMPLE_EH_MUST_NOT_THROW:
4974       return false;
4975 
4976     CASE_GIMPLE_OMP:
4977       /* OpenMP directives are validated by the FE and never operated
4978 	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
4979 	 non-gimple expressions when the main index variable has had
4980 	 its address taken.  This does not affect the loop itself
4981 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4982 	 how to set up the parallel iteration.  */
4983       return false;
4984 
4985     case GIMPLE_DEBUG:
4986       return verify_gimple_debug (stmt);
4987 
4988     default:
4989       gcc_unreachable ();
4990     }
4991 }
4992 
4993 /* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
4994    and false otherwise.  */
4995 
4996 static bool
4997 verify_gimple_phi (gphi *phi)
4998 {
4999   bool err = false;
5000   unsigned i;
5001   tree phi_result = gimple_phi_result (phi);
5002   bool virtual_p;
5003 
5004   if (!phi_result)
5005     {
5006       error ("invalid %<PHI%> result");
5007       return true;
5008     }
5009 
5010   virtual_p = virtual_operand_p (phi_result);
5011   if (TREE_CODE (phi_result) != SSA_NAME
5012       || (virtual_p
5013 	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5014     {
5015       error ("invalid %<PHI%> result");
5016       err = true;
5017     }
5018 
5019   for (i = 0; i < gimple_phi_num_args (phi); i++)
5020     {
5021       tree t = gimple_phi_arg_def (phi, i);
5022 
5023       if (!t)
5024 	{
5025 	  error ("missing %<PHI%> def");
5026 	  err |= true;
5027 	  continue;
5028 	}
5029       /* Addressable variables do have SSA_NAMEs but they
5030 	 are not considered gimple values.  */
5031       else if ((TREE_CODE (t) == SSA_NAME
5032 		&& virtual_p != virtual_operand_p (t))
5033 	       || (virtual_p
5034 		   && (TREE_CODE (t) != SSA_NAME
5035 		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5036 	       || (!virtual_p
5037 		   && !is_gimple_val (t)))
5038 	{
5039 	  error ("invalid %<PHI%> argument");
5040 	  debug_generic_expr (t);
5041 	  err |= true;
5042 	}
5043 #ifdef ENABLE_TYPES_CHECKING
5044       if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5045 	{
5046 	  error ("incompatible types in %<PHI%> argument %u", i);
5047 	  debug_generic_stmt (TREE_TYPE (phi_result));
5048 	  debug_generic_stmt (TREE_TYPE (t));
5049 	  err |= true;
5050 	}
5051 #endif
5052     }
5053 
5054   return err;
5055 }
5056 
5057 /* Verify the GIMPLE statements inside the sequence STMTS.  */
5058 
5059 static bool
verify_gimple_in_seq_2(gimple_seq stmts)5060 verify_gimple_in_seq_2 (gimple_seq stmts)
5061 {
5062   gimple_stmt_iterator ittr;
5063   bool err = false;
5064 
5065   for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5066     {
5067       gimple *stmt = gsi_stmt (ittr);
5068 
5069       switch (gimple_code (stmt))
5070         {
5071 	case GIMPLE_BIND:
5072 	  err |= verify_gimple_in_seq_2 (
5073                    gimple_bind_body (as_a <gbind *> (stmt)));
5074 	  break;
5075 
5076 	case GIMPLE_TRY:
5077 	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5078 	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5079 	  break;
5080 
5081 	case GIMPLE_EH_FILTER:
5082 	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5083 	  break;
5084 
5085 	case GIMPLE_EH_ELSE:
5086 	  {
5087 	    geh_else *eh_else = as_a <geh_else *> (stmt);
5088 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5089 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5090 	  }
5091 	  break;
5092 
5093 	case GIMPLE_CATCH:
5094 	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5095 					   as_a <gcatch *> (stmt)));
5096 	  break;
5097 
5098 	case GIMPLE_TRANSACTION:
5099 	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5100 	  break;
5101 
5102 	default:
5103 	  {
5104 	    bool err2 = verify_gimple_stmt (stmt);
5105 	    if (err2)
5106 	      debug_gimple_stmt (stmt);
5107 	    err |= err2;
5108 	  }
5109 	}
5110     }
5111 
5112   return err;
5113 }
5114 
5115 /* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
5116    is a problem, otherwise false.  */
5117 
5118 static bool
verify_gimple_transaction(gtransaction * stmt)5119 verify_gimple_transaction (gtransaction *stmt)
5120 {
5121   tree lab;
5122 
5123   lab = gimple_transaction_label_norm (stmt);
5124   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5125     return true;
5126   lab = gimple_transaction_label_uninst (stmt);
5127   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5128     return true;
5129   lab = gimple_transaction_label_over (stmt);
5130   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5131     return true;
5132 
5133   return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5134 }
5135 
5136 
5137 /* Verify the GIMPLE statements inside the statement list STMTS.  */
5138 
5139 DEBUG_FUNCTION void
verify_gimple_in_seq(gimple_seq stmts)5140 verify_gimple_in_seq (gimple_seq stmts)
5141 {
5142   timevar_push (TV_TREE_STMT_VERIFY);
5143   if (verify_gimple_in_seq_2 (stmts))
5144     internal_error ("%<verify_gimple%> failed");
5145   timevar_pop (TV_TREE_STMT_VERIFY);
5146 }
5147 
5148 /* Return true when the T can be shared.  */
5149 
5150 static bool
tree_node_can_be_shared(tree t)5151 tree_node_can_be_shared (tree t)
5152 {
5153   if (IS_TYPE_OR_DECL_P (t)
5154       || TREE_CODE (t) == SSA_NAME
5155       || TREE_CODE (t) == IDENTIFIER_NODE
5156       || TREE_CODE (t) == CASE_LABEL_EXPR
5157       || is_gimple_min_invariant (t))
5158     return true;
5159 
5160   if (t == error_mark_node)
5161     return true;
5162 
5163   return false;
5164 }
5165 
5166 /* Called via walk_tree.  Verify tree sharing.  */
5167 
5168 static tree
verify_node_sharing_1(tree * tp,int * walk_subtrees,void * data)5169 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5170 {
5171   hash_set<void *> *visited = (hash_set<void *> *) data;
5172 
5173   if (tree_node_can_be_shared (*tp))
5174     {
5175       *walk_subtrees = false;
5176       return NULL;
5177     }
5178 
5179   if (visited->add (*tp))
5180     return *tp;
5181 
5182   return NULL;
5183 }
5184 
5185 /* Called via walk_gimple_stmt.  Verify tree sharing.  */
5186 
5187 static tree
verify_node_sharing(tree * tp,int * walk_subtrees,void * data)5188 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5189 {
5190   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5191   return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5192 }
5193 
5194 static bool eh_error_found;
5195 bool
verify_eh_throw_stmt_node(gimple * const & stmt,const int &,hash_set<gimple * > * visited)5196 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5197 			   hash_set<gimple *> *visited)
5198 {
5199   if (!visited->contains (stmt))
5200     {
5201       error ("dead statement in EH table");
5202       debug_gimple_stmt (stmt);
5203       eh_error_found = true;
5204     }
5205   return true;
5206 }
5207 
5208 /* Verify if the location LOCs block is in BLOCKS.  */
5209 
5210 static bool
verify_location(hash_set<tree> * blocks,location_t loc)5211 verify_location (hash_set<tree> *blocks, location_t loc)
5212 {
5213   tree block = LOCATION_BLOCK (loc);
5214   if (block != NULL_TREE
5215       && !blocks->contains (block))
5216     {
5217       error ("location references block not in block tree");
5218       return true;
5219     }
5220   if (block != NULL_TREE)
5221     return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5222   return false;
5223 }
5224 
5225 /* Called via walk_tree.  Verify that expressions have no blocks.  */
5226 
5227 static tree
verify_expr_no_block(tree * tp,int * walk_subtrees,void *)5228 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5229 {
5230   if (!EXPR_P (*tp))
5231     {
5232       *walk_subtrees = false;
5233       return NULL;
5234     }
5235 
5236   location_t loc = EXPR_LOCATION (*tp);
5237   if (LOCATION_BLOCK (loc) != NULL)
5238     return *tp;
5239 
5240   return NULL;
5241 }
5242 
5243 /* Called via walk_tree.  Verify locations of expressions.  */
5244 
5245 static tree
verify_expr_location_1(tree * tp,int * walk_subtrees,void * data)5246 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5247 {
5248   hash_set<tree> *blocks = (hash_set<tree> *) data;
5249   tree t = *tp;
5250 
5251   /* ???  This doesn't really belong here but there's no good place to
5252      stick this remainder of old verify_expr.  */
5253   /* ???  This barfs on debug stmts which contain binds to vars with
5254      different function context.  */
5255 #if 0
5256   if (VAR_P (t)
5257       || TREE_CODE (t) == PARM_DECL
5258       || TREE_CODE (t) == RESULT_DECL)
5259     {
5260       tree context = decl_function_context (t);
5261       if (context != cfun->decl
5262 	  && !SCOPE_FILE_SCOPE_P (context)
5263 	  && !TREE_STATIC (t)
5264 	  && !DECL_EXTERNAL (t))
5265 	{
5266 	  error ("local declaration from a different function");
5267 	  return t;
5268 	}
5269     }
5270 #endif
5271 
5272   if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5273     {
5274       tree x = DECL_DEBUG_EXPR (t);
5275       tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5276       if (addr)
5277 	return addr;
5278     }
5279   if ((VAR_P (t)
5280        || TREE_CODE (t) == PARM_DECL
5281        || TREE_CODE (t) == RESULT_DECL)
5282       && DECL_HAS_VALUE_EXPR_P (t))
5283     {
5284       tree x = DECL_VALUE_EXPR (t);
5285       tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5286       if (addr)
5287 	return addr;
5288     }
5289 
5290   if (!EXPR_P (t))
5291     {
5292       *walk_subtrees = false;
5293       return NULL;
5294     }
5295 
5296   location_t loc = EXPR_LOCATION (t);
5297   if (verify_location (blocks, loc))
5298     return t;
5299 
5300   return NULL;
5301 }
5302 
5303 /* Called via walk_gimple_op.  Verify locations of expressions.  */
5304 
5305 static tree
verify_expr_location(tree * tp,int * walk_subtrees,void * data)5306 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5307 {
5308   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5309   return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5310 }
5311 
5312 /* Insert all subblocks of BLOCK into BLOCKS and recurse.  */
5313 
5314 static void
collect_subblocks(hash_set<tree> * blocks,tree block)5315 collect_subblocks (hash_set<tree> *blocks, tree block)
5316 {
5317   tree t;
5318   for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5319     {
5320       blocks->add (t);
5321       collect_subblocks (blocks, t);
5322     }
5323 }
5324 
5325 /* Disable warnings about missing quoting in GCC diagnostics for
5326    the verification errors.  Their format strings don't follow
5327    GCC diagnostic conventions and trigger an ICE in the end.  */
5328 #if __GNUC__ >= 10
5329 #  pragma GCC diagnostic push
5330 #  pragma GCC diagnostic ignored "-Wformat-diag"
5331 #endif
5332 
5333 /* Verify the GIMPLE statements in the CFG of FN.  */
5334 
5335 DEBUG_FUNCTION void
verify_gimple_in_cfg(struct function * fn,bool verify_nothrow)5336 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5337 {
5338   basic_block bb;
5339   bool err = false;
5340 
5341   timevar_push (TV_TREE_STMT_VERIFY);
5342   hash_set<void *> visited;
5343   hash_set<gimple *> visited_throwing_stmts;
5344 
5345   /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
5346   hash_set<tree> blocks;
5347   if (DECL_INITIAL (fn->decl))
5348     {
5349       blocks.add (DECL_INITIAL (fn->decl));
5350       collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5351     }
5352 
5353   FOR_EACH_BB_FN (bb, fn)
5354     {
5355       gimple_stmt_iterator gsi;
5356       edge_iterator ei;
5357       edge e;
5358 
5359       for (gphi_iterator gpi = gsi_start_phis (bb);
5360 	   !gsi_end_p (gpi);
5361 	   gsi_next (&gpi))
5362 	{
5363 	  gphi *phi = gpi.phi ();
5364 	  bool err2 = false;
5365 	  unsigned i;
5366 
5367 	  if (gimple_bb (phi) != bb)
5368 	    {
5369 	      error ("gimple_bb (phi) is set to a wrong basic block");
5370 	      err2 = true;
5371 	    }
5372 
5373 	  err2 |= verify_gimple_phi (phi);
5374 
5375 	  /* Only PHI arguments have locations.  */
5376 	  if (gimple_location (phi) != UNKNOWN_LOCATION)
5377 	    {
5378 	      error ("PHI node with location");
5379 	      err2 = true;
5380 	    }
5381 
5382 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
5383 	    {
5384 	      tree arg = gimple_phi_arg_def (phi, i);
5385 	      tree addr = walk_tree (&arg, verify_node_sharing_1,
5386 				     &visited, NULL);
5387 	      if (addr)
5388 		{
5389 		  error ("incorrect sharing of tree nodes");
5390 		  debug_generic_expr (addr);
5391 		  err2 |= true;
5392 		}
5393 	      location_t loc = gimple_phi_arg_location (phi, i);
5394 	      if (virtual_operand_p (gimple_phi_result (phi))
5395 		  && loc != UNKNOWN_LOCATION)
5396 		{
5397 		  error ("virtual PHI with argument locations");
5398 		  err2 = true;
5399 		}
5400 	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5401 	      if (addr)
5402 		{
5403 		  debug_generic_expr (addr);
5404 		  err2 = true;
5405 		}
5406 	      err2 |= verify_location (&blocks, loc);
5407 	    }
5408 
5409 	  if (err2)
5410 	    debug_gimple_stmt (phi);
5411 	  err |= err2;
5412 	}
5413 
5414       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5415 	{
5416 	  gimple *stmt = gsi_stmt (gsi);
5417 	  bool err2 = false;
5418 	  struct walk_stmt_info wi;
5419 	  tree addr;
5420 	  int lp_nr;
5421 
5422 	  if (gimple_bb (stmt) != bb)
5423 	    {
5424 	      error ("gimple_bb (stmt) is set to a wrong basic block");
5425 	      err2 = true;
5426 	    }
5427 
5428 	  err2 |= verify_gimple_stmt (stmt);
5429 	  err2 |= verify_location (&blocks, gimple_location (stmt));
5430 
5431 	  memset (&wi, 0, sizeof (wi));
5432 	  wi.info = (void *) &visited;
5433 	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5434 	  if (addr)
5435 	    {
5436 	      error ("incorrect sharing of tree nodes");
5437 	      debug_generic_expr (addr);
5438 	      err2 |= true;
5439 	    }
5440 
5441 	  memset (&wi, 0, sizeof (wi));
5442 	  wi.info = (void *) &blocks;
5443 	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5444 	  if (addr)
5445 	    {
5446 	      debug_generic_expr (addr);
5447 	      err2 |= true;
5448 	    }
5449 
5450 	  /* If the statement is marked as part of an EH region, then it is
5451 	     expected that the statement could throw.  Verify that when we
5452 	     have optimizations that simplify statements such that we prove
5453 	     that they cannot throw, that we update other data structures
5454 	     to match.  */
5455 	  lp_nr = lookup_stmt_eh_lp (stmt);
5456 	  if (lp_nr != 0)
5457 	    visited_throwing_stmts.add (stmt);
5458 	  if (lp_nr > 0)
5459 	    {
5460 	      if (!stmt_could_throw_p (cfun, stmt))
5461 		{
5462 		  if (verify_nothrow)
5463 		    {
5464 		      error ("statement marked for throw, but doesn%'t");
5465 		      err2 |= true;
5466 		    }
5467 		}
5468 	      else if (!gsi_one_before_end_p (gsi))
5469 		{
5470 		  error ("statement marked for throw in middle of block");
5471 		  err2 |= true;
5472 		}
5473 	    }
5474 
5475 	  if (err2)
5476 	    debug_gimple_stmt (stmt);
5477 	  err |= err2;
5478 	}
5479 
5480       FOR_EACH_EDGE (e, ei, bb->succs)
5481 	if (e->goto_locus != UNKNOWN_LOCATION)
5482 	  err |= verify_location (&blocks, e->goto_locus);
5483     }
5484 
5485   hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5486   eh_error_found = false;
5487   if (eh_table)
5488     eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5489       (&visited_throwing_stmts);
5490 
5491   if (err || eh_error_found)
5492     internal_error ("verify_gimple failed");
5493 
5494   verify_histograms ();
5495   timevar_pop (TV_TREE_STMT_VERIFY);
5496 }
5497 
5498 
5499 /* Verifies that the flow information is OK.  */
5500 
5501 static int
gimple_verify_flow_info(void)5502 gimple_verify_flow_info (void)
5503 {
5504   int err = 0;
5505   basic_block bb;
5506   gimple_stmt_iterator gsi;
5507   gimple *stmt;
5508   edge e;
5509   edge_iterator ei;
5510 
5511   if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5512       || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5513     {
5514       error ("ENTRY_BLOCK has IL associated with it");
5515       err = 1;
5516     }
5517 
5518   if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5519       || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5520     {
5521       error ("EXIT_BLOCK has IL associated with it");
5522       err = 1;
5523     }
5524 
5525   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5526     if (e->flags & EDGE_FALLTHRU)
5527       {
5528 	error ("fallthru to exit from bb %d", e->src->index);
5529 	err = 1;
5530       }
5531 
5532   FOR_EACH_BB_FN (bb, cfun)
5533     {
5534       bool found_ctrl_stmt = false;
5535 
5536       stmt = NULL;
5537 
5538       /* Skip labels on the start of basic block.  */
5539       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5540 	{
5541 	  tree label;
5542 	  gimple *prev_stmt = stmt;
5543 
5544 	  stmt = gsi_stmt (gsi);
5545 
5546 	  if (gimple_code (stmt) != GIMPLE_LABEL)
5547 	    break;
5548 
5549 	  label = gimple_label_label (as_a <glabel *> (stmt));
5550 	  if (prev_stmt && DECL_NONLOCAL (label))
5551 	    {
5552 	      error ("nonlocal label ");
5553 	      print_generic_expr (stderr, label);
5554 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5555 		       bb->index);
5556 	      err = 1;
5557 	    }
5558 
5559 	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5560 	    {
5561 	      error ("EH landing pad label ");
5562 	      print_generic_expr (stderr, label);
5563 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5564 		       bb->index);
5565 	      err = 1;
5566 	    }
5567 
5568 	  if (label_to_block (cfun, label) != bb)
5569 	    {
5570 	      error ("label ");
5571 	      print_generic_expr (stderr, label);
5572 	      fprintf (stderr, " to block does not match in bb %d",
5573 		       bb->index);
5574 	      err = 1;
5575 	    }
5576 
5577 	  if (decl_function_context (label) != current_function_decl)
5578 	    {
5579 	      error ("label ");
5580 	      print_generic_expr (stderr, label);
5581 	      fprintf (stderr, " has incorrect context in bb %d",
5582 		       bb->index);
5583 	      err = 1;
5584 	    }
5585 	}
5586 
5587       /* Verify that body of basic block BB is free of control flow.  */
5588       for (; !gsi_end_p (gsi); gsi_next (&gsi))
5589 	{
5590 	  gimple *stmt = gsi_stmt (gsi);
5591 
5592 	  if (found_ctrl_stmt)
5593 	    {
5594 	      error ("control flow in the middle of basic block %d",
5595 		     bb->index);
5596 	      err = 1;
5597 	    }
5598 
5599 	  if (stmt_ends_bb_p (stmt))
5600 	    found_ctrl_stmt = true;
5601 
5602 	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5603 	    {
5604 	      error ("label ");
5605 	      print_generic_expr (stderr, gimple_label_label (label_stmt));
5606 	      fprintf (stderr, " in the middle of basic block %d", bb->index);
5607 	      err = 1;
5608 	    }
5609 	}
5610 
5611       gsi = gsi_last_nondebug_bb (bb);
5612       if (gsi_end_p (gsi))
5613 	continue;
5614 
5615       stmt = gsi_stmt (gsi);
5616 
5617       if (gimple_code (stmt) == GIMPLE_LABEL)
5618 	continue;
5619 
5620       err |= verify_eh_edges (stmt);
5621 
5622       if (is_ctrl_stmt (stmt))
5623 	{
5624 	  FOR_EACH_EDGE (e, ei, bb->succs)
5625 	    if (e->flags & EDGE_FALLTHRU)
5626 	      {
5627 		error ("fallthru edge after a control statement in bb %d",
5628 		       bb->index);
5629 		err = 1;
5630 	      }
5631 	}
5632 
5633       if (gimple_code (stmt) != GIMPLE_COND)
5634 	{
5635 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5636 	     after anything else but if statement.  */
5637 	  FOR_EACH_EDGE (e, ei, bb->succs)
5638 	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5639 	      {
5640 		error ("true/false edge after a non-GIMPLE_COND in bb %d",
5641 		       bb->index);
5642 		err = 1;
5643 	      }
5644 	}
5645 
5646       switch (gimple_code (stmt))
5647 	{
5648 	case GIMPLE_COND:
5649 	  {
5650 	    edge true_edge;
5651 	    edge false_edge;
5652 
5653 	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5654 
5655 	    if (!true_edge
5656 		|| !false_edge
5657 		|| !(true_edge->flags & EDGE_TRUE_VALUE)
5658 		|| !(false_edge->flags & EDGE_FALSE_VALUE)
5659 		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5660 		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5661 		|| EDGE_COUNT (bb->succs) >= 3)
5662 	      {
5663 		error ("wrong outgoing edge flags at end of bb %d",
5664 		       bb->index);
5665 		err = 1;
5666 	      }
5667 	  }
5668 	  break;
5669 
5670 	case GIMPLE_GOTO:
5671 	  if (simple_goto_p (stmt))
5672 	    {
5673 	      error ("explicit goto at end of bb %d", bb->index);
5674 	      err = 1;
5675 	    }
5676 	  else
5677 	    {
5678 	      /* FIXME.  We should double check that the labels in the
5679 		 destination blocks have their address taken.  */
5680 	      FOR_EACH_EDGE (e, ei, bb->succs)
5681 		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5682 				 | EDGE_FALSE_VALUE))
5683 		    || !(e->flags & EDGE_ABNORMAL))
5684 		  {
5685 		    error ("wrong outgoing edge flags at end of bb %d",
5686 			   bb->index);
5687 		    err = 1;
5688 		  }
5689 	    }
5690 	  break;
5691 
5692 	case GIMPLE_CALL:
5693 	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5694 	    break;
5695 	  /* fallthru */
5696 	case GIMPLE_RETURN:
5697 	  if (!single_succ_p (bb)
5698 	      || (single_succ_edge (bb)->flags
5699 		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
5700 		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5701 	    {
5702 	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
5703 	      err = 1;
5704 	    }
5705 	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5706 	    {
5707 	      error ("return edge does not point to exit in bb %d",
5708 		     bb->index);
5709 	      err = 1;
5710 	    }
5711 	  break;
5712 
5713 	case GIMPLE_SWITCH:
5714 	  {
5715 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
5716 	    tree prev;
5717 	    edge e;
5718 	    size_t i, n;
5719 
5720 	    n = gimple_switch_num_labels (switch_stmt);
5721 
5722 	    /* Mark all the destination basic blocks.  */
5723 	    for (i = 0; i < n; ++i)
5724 	      {
5725 		basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5726 		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5727 		label_bb->aux = (void *)1;
5728 	      }
5729 
5730 	    /* Verify that the case labels are sorted.  */
5731 	    prev = gimple_switch_label (switch_stmt, 0);
5732 	    for (i = 1; i < n; ++i)
5733 	      {
5734 		tree c = gimple_switch_label (switch_stmt, i);
5735 		if (!CASE_LOW (c))
5736 		  {
5737 		    error ("found default case not at the start of "
5738 			   "case vector");
5739 		    err = 1;
5740 		    continue;
5741 		  }
5742 		if (CASE_LOW (prev)
5743 		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5744 		  {
5745 		    error ("case labels not sorted: ");
5746 		    print_generic_expr (stderr, prev);
5747 		    fprintf (stderr," is greater than ");
5748 		    print_generic_expr (stderr, c);
5749 		    fprintf (stderr," but comes before it.\n");
5750 		    err = 1;
5751 		  }
5752 		prev = c;
5753 	      }
5754 	    /* VRP will remove the default case if it can prove it will
5755 	       never be executed.  So do not verify there always exists
5756 	       a default case here.  */
5757 
5758 	    FOR_EACH_EDGE (e, ei, bb->succs)
5759 	      {
5760 		if (!e->dest->aux)
5761 		  {
5762 		    error ("extra outgoing edge %d->%d",
5763 			   bb->index, e->dest->index);
5764 		    err = 1;
5765 		  }
5766 
5767 		e->dest->aux = (void *)2;
5768 		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5769 				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5770 		  {
5771 		    error ("wrong outgoing edge flags at end of bb %d",
5772 			   bb->index);
5773 		    err = 1;
5774 		  }
5775 	      }
5776 
5777 	    /* Check that we have all of them.  */
5778 	    for (i = 0; i < n; ++i)
5779 	      {
5780 		basic_block label_bb = gimple_switch_label_bb (cfun,
5781 							       switch_stmt, i);
5782 
5783 		if (label_bb->aux != (void *)2)
5784 		  {
5785 		    error ("missing edge %i->%i", bb->index, label_bb->index);
5786 		    err = 1;
5787 		  }
5788 	      }
5789 
5790 	    FOR_EACH_EDGE (e, ei, bb->succs)
5791 	      e->dest->aux = (void *)0;
5792 	  }
5793 	  break;
5794 
5795 	case GIMPLE_EH_DISPATCH:
5796 	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5797 	  break;
5798 
5799 	default:
5800 	  break;
5801 	}
5802     }
5803 
5804   if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5805     verify_dominators (CDI_DOMINATORS);
5806 
5807   return err;
5808 }
5809 
5810 #if __GNUC__ >= 10
5811 #  pragma GCC diagnostic pop
5812 #endif
5813 
5814 /* Updates phi nodes after creating a forwarder block joined
5815    by edge FALLTHRU.  */
5816 
5817 static void
gimple_make_forwarder_block(edge fallthru)5818 gimple_make_forwarder_block (edge fallthru)
5819 {
5820   edge e;
5821   edge_iterator ei;
5822   basic_block dummy, bb;
5823   tree var;
5824   gphi_iterator gsi;
5825   bool forward_location_p;
5826 
5827   dummy = fallthru->src;
5828   bb = fallthru->dest;
5829 
5830   if (single_pred_p (bb))
5831     return;
5832 
5833   /* We can forward location info if we have only one predecessor.  */
5834   forward_location_p = single_pred_p (dummy);
5835 
5836   /* If we redirected a branch we must create new PHI nodes at the
5837      start of BB.  */
5838   for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5839     {
5840       gphi *phi, *new_phi;
5841 
5842       phi = gsi.phi ();
5843       var = gimple_phi_result (phi);
5844       new_phi = create_phi_node (var, bb);
5845       gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5846       add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5847 		   forward_location_p
5848 		   ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5849     }
5850 
5851   /* Add the arguments we have stored on edges.  */
5852   FOR_EACH_EDGE (e, ei, bb->preds)
5853     {
5854       if (e == fallthru)
5855 	continue;
5856 
5857       flush_pending_stmts (e);
5858     }
5859 }
5860 
5861 
5862 /* Return a non-special label in the head of basic block BLOCK.
5863    Create one if it doesn't exist.  */
5864 
5865 tree
gimple_block_label(basic_block bb)5866 gimple_block_label (basic_block bb)
5867 {
5868   gimple_stmt_iterator i, s = gsi_start_bb (bb);
5869   bool first = true;
5870   tree label;
5871   glabel *stmt;
5872 
5873   for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5874     {
5875       stmt = dyn_cast <glabel *> (gsi_stmt (i));
5876       if (!stmt)
5877 	break;
5878       label = gimple_label_label (stmt);
5879       if (!DECL_NONLOCAL (label))
5880 	{
5881 	  if (!first)
5882 	    gsi_move_before (&i, &s);
5883 	  return label;
5884 	}
5885     }
5886 
5887   label = create_artificial_label (UNKNOWN_LOCATION);
5888   stmt = gimple_build_label (label);
5889   gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5890   return label;
5891 }
5892 
5893 
5894 /* Attempt to perform edge redirection by replacing a possibly complex
5895    jump instruction by a goto or by removing the jump completely.
5896    This can apply only if all edges now point to the same block.  The
5897    parameters and return values are equivalent to
5898    redirect_edge_and_branch.  */
5899 
5900 static edge
gimple_try_redirect_by_replacing_jump(edge e,basic_block target)5901 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5902 {
5903   basic_block src = e->src;
5904   gimple_stmt_iterator i;
5905   gimple *stmt;
5906 
5907   /* We can replace or remove a complex jump only when we have exactly
5908      two edges.  */
5909   if (EDGE_COUNT (src->succs) != 2
5910       /* Verify that all targets will be TARGET.  Specifically, the
5911 	 edge that is not E must also go to TARGET.  */
5912       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5913     return NULL;
5914 
5915   i = gsi_last_bb (src);
5916   if (gsi_end_p (i))
5917     return NULL;
5918 
5919   stmt = gsi_stmt (i);
5920 
5921   if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5922     {
5923       gsi_remove (&i, true);
5924       e = ssa_redirect_edge (e, target);
5925       e->flags = EDGE_FALLTHRU;
5926       return e;
5927     }
5928 
5929   return NULL;
5930 }
5931 
5932 
5933 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
5934    edge representing the redirected branch.  */
5935 
5936 static edge
gimple_redirect_edge_and_branch(edge e,basic_block dest)5937 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5938 {
5939   basic_block bb = e->src;
5940   gimple_stmt_iterator gsi;
5941   edge ret;
5942   gimple *stmt;
5943 
5944   if (e->flags & EDGE_ABNORMAL)
5945     return NULL;
5946 
5947   if (e->dest == dest)
5948     return NULL;
5949 
5950   if (e->flags & EDGE_EH)
5951     return redirect_eh_edge (e, dest);
5952 
5953   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5954     {
5955       ret = gimple_try_redirect_by_replacing_jump (e, dest);
5956       if (ret)
5957 	return ret;
5958     }
5959 
5960   gsi = gsi_last_nondebug_bb (bb);
5961   stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5962 
5963   switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5964     {
5965     case GIMPLE_COND:
5966       /* For COND_EXPR, we only need to redirect the edge.  */
5967       break;
5968 
5969     case GIMPLE_GOTO:
5970       /* No non-abnormal edges should lead from a non-simple goto, and
5971 	 simple ones should be represented implicitly.  */
5972       gcc_unreachable ();
5973 
5974     case GIMPLE_SWITCH:
5975       {
5976 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
5977 	tree label = gimple_block_label (dest);
5978         tree cases = get_cases_for_edge (e, switch_stmt);
5979 
5980 	/* If we have a list of cases associated with E, then use it
5981 	   as it's a lot faster than walking the entire case vector.  */
5982 	if (cases)
5983 	  {
5984 	    edge e2 = find_edge (e->src, dest);
5985 	    tree last, first;
5986 
5987 	    first = cases;
5988 	    while (cases)
5989 	      {
5990 		last = cases;
5991 		CASE_LABEL (cases) = label;
5992 		cases = CASE_CHAIN (cases);
5993 	      }
5994 
5995 	    /* If there was already an edge in the CFG, then we need
5996 	       to move all the cases associated with E to E2.  */
5997 	    if (e2)
5998 	      {
5999 		tree cases2 = get_cases_for_edge (e2, switch_stmt);
6000 
6001 		CASE_CHAIN (last) = CASE_CHAIN (cases2);
6002 		CASE_CHAIN (cases2) = first;
6003 	      }
6004 	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6005 	  }
6006 	else
6007 	  {
6008 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
6009 
6010 	    for (i = 0; i < n; i++)
6011 	      {
6012 		tree elt = gimple_switch_label (switch_stmt, i);
6013 		if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6014 		  CASE_LABEL (elt) = label;
6015 	      }
6016 	  }
6017       }
6018       break;
6019 
6020     case GIMPLE_ASM:
6021       {
6022 	gasm *asm_stmt = as_a <gasm *> (stmt);
6023 	int i, n = gimple_asm_nlabels (asm_stmt);
6024 	tree label = NULL;
6025 
6026 	for (i = 0; i < n; ++i)
6027 	  {
6028 	    tree cons = gimple_asm_label_op (asm_stmt, i);
6029 	    if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6030 	      {
6031 		if (!label)
6032 		  label = gimple_block_label (dest);
6033 		TREE_VALUE (cons) = label;
6034 	      }
6035 	  }
6036 
6037 	/* If we didn't find any label matching the former edge in the
6038 	   asm labels, we must be redirecting the fallthrough
6039 	   edge.  */
6040 	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6041       }
6042       break;
6043 
6044     case GIMPLE_RETURN:
6045       gsi_remove (&gsi, true);
6046       e->flags |= EDGE_FALLTHRU;
6047       break;
6048 
6049     case GIMPLE_OMP_RETURN:
6050     case GIMPLE_OMP_CONTINUE:
6051     case GIMPLE_OMP_SECTIONS_SWITCH:
6052     case GIMPLE_OMP_FOR:
6053       /* The edges from OMP constructs can be simply redirected.  */
6054       break;
6055 
6056     case GIMPLE_EH_DISPATCH:
6057       if (!(e->flags & EDGE_FALLTHRU))
6058 	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6059       break;
6060 
6061     case GIMPLE_TRANSACTION:
6062       if (e->flags & EDGE_TM_ABORT)
6063 	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6064 				           gimple_block_label (dest));
6065       else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6066 	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6067 				             gimple_block_label (dest));
6068       else
6069 	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6070 				           gimple_block_label (dest));
6071       break;
6072 
6073     default:
6074       /* Otherwise it must be a fallthru edge, and we don't need to
6075 	 do anything besides redirecting it.  */
6076       gcc_assert (e->flags & EDGE_FALLTHRU);
6077       break;
6078     }
6079 
6080   /* Update/insert PHI nodes as necessary.  */
6081 
6082   /* Now update the edges in the CFG.  */
6083   e = ssa_redirect_edge (e, dest);
6084 
6085   return e;
6086 }
6087 
6088 /* Returns true if it is possible to remove edge E by redirecting
6089    it to the destination of the other edge from E->src.  */
6090 
6091 static bool
gimple_can_remove_branch_p(const_edge e)6092 gimple_can_remove_branch_p (const_edge e)
6093 {
6094   if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6095     return false;
6096 
6097   return true;
6098 }
6099 
6100 /* Simple wrapper, as we can always redirect fallthru edges.  */
6101 
6102 static basic_block
gimple_redirect_edge_and_branch_force(edge e,basic_block dest)6103 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6104 {
6105   e = gimple_redirect_edge_and_branch (e, dest);
6106   gcc_assert (e);
6107 
6108   return NULL;
6109 }
6110 
6111 
6112 /* Splits basic block BB after statement STMT (but at least after the
6113    labels).  If STMT is NULL, BB is split just after the labels.  */
6114 
6115 static basic_block
gimple_split_block(basic_block bb,void * stmt)6116 gimple_split_block (basic_block bb, void *stmt)
6117 {
6118   gimple_stmt_iterator gsi;
6119   gimple_stmt_iterator gsi_tgt;
6120   gimple_seq list;
6121   basic_block new_bb;
6122   edge e;
6123   edge_iterator ei;
6124 
6125   new_bb = create_empty_bb (bb);
6126 
6127   /* Redirect the outgoing edges.  */
6128   new_bb->succs = bb->succs;
6129   bb->succs = NULL;
6130   FOR_EACH_EDGE (e, ei, new_bb->succs)
6131     e->src = new_bb;
6132 
6133   /* Get a stmt iterator pointing to the first stmt to move.  */
6134   if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6135     gsi = gsi_after_labels (bb);
6136   else
6137     {
6138       gsi = gsi_for_stmt ((gimple *) stmt);
6139       gsi_next (&gsi);
6140     }
6141 
6142   /* Move everything from GSI to the new basic block.  */
6143   if (gsi_end_p (gsi))
6144     return new_bb;
6145 
6146   /* Split the statement list - avoid re-creating new containers as this
6147      brings ugly quadratic memory consumption in the inliner.
6148      (We are still quadratic since we need to update stmt BB pointers,
6149      sadly.)  */
6150   gsi_split_seq_before (&gsi, &list);
6151   set_bb_seq (new_bb, list);
6152   for (gsi_tgt = gsi_start (list);
6153        !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6154     gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6155 
6156   return new_bb;
6157 }
6158 
6159 
6160 /* Moves basic block BB after block AFTER.  */
6161 
6162 static bool
gimple_move_block_after(basic_block bb,basic_block after)6163 gimple_move_block_after (basic_block bb, basic_block after)
6164 {
6165   if (bb->prev_bb == after)
6166     return true;
6167 
6168   unlink_block (bb);
6169   link_block (bb, after);
6170 
6171   return true;
6172 }
6173 
6174 
6175 /* Return TRUE if block BB has no executable statements, otherwise return
6176    FALSE.  */
6177 
6178 static bool
gimple_empty_block_p(basic_block bb)6179 gimple_empty_block_p (basic_block bb)
6180 {
6181   /* BB must have no executable statements.  */
6182   gimple_stmt_iterator gsi = gsi_after_labels (bb);
6183   if (phi_nodes (bb))
6184     return false;
6185   while (!gsi_end_p (gsi))
6186     {
6187       gimple *stmt = gsi_stmt (gsi);
6188       if (is_gimple_debug (stmt))
6189 	;
6190       else if (gimple_code (stmt) == GIMPLE_NOP
6191 	       || gimple_code (stmt) == GIMPLE_PREDICT)
6192 	;
6193       else
6194 	return false;
6195       gsi_next (&gsi);
6196     }
6197   return true;
6198 }
6199 
6200 
6201 /* Split a basic block if it ends with a conditional branch and if the
6202    other part of the block is not empty.  */
6203 
6204 static basic_block
gimple_split_block_before_cond_jump(basic_block bb)6205 gimple_split_block_before_cond_jump (basic_block bb)
6206 {
6207   gimple *last, *split_point;
6208   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6209   if (gsi_end_p (gsi))
6210     return NULL;
6211   last = gsi_stmt (gsi);
6212   if (gimple_code (last) != GIMPLE_COND
6213       && gimple_code (last) != GIMPLE_SWITCH)
6214     return NULL;
6215   gsi_prev (&gsi);
6216   split_point = gsi_stmt (gsi);
6217   return split_block (bb, split_point)->dest;
6218 }
6219 
6220 
6221 /* Return true if basic_block can be duplicated.  */
6222 
6223 static bool
gimple_can_duplicate_bb_p(const_basic_block bb ATTRIBUTE_UNUSED)6224 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6225 {
6226   return true;
6227 }
6228 
6229 /* Create a duplicate of the basic block BB.  NOTE: This does not
6230    preserve SSA form.  */
6231 
6232 static basic_block
gimple_duplicate_bb(basic_block bb,copy_bb_data * id)6233 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6234 {
6235   basic_block new_bb;
6236   gimple_stmt_iterator gsi_tgt;
6237 
6238   new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6239 
6240   /* Copy the PHI nodes.  We ignore PHI node arguments here because
6241      the incoming edges have not been setup yet.  */
6242   for (gphi_iterator gpi = gsi_start_phis (bb);
6243        !gsi_end_p (gpi);
6244        gsi_next (&gpi))
6245     {
6246       gphi *phi, *copy;
6247       phi = gpi.phi ();
6248       copy = create_phi_node (NULL_TREE, new_bb);
6249       create_new_def_for (gimple_phi_result (phi), copy,
6250 			  gimple_phi_result_ptr (copy));
6251       gimple_set_uid (copy, gimple_uid (phi));
6252     }
6253 
6254   gsi_tgt = gsi_start_bb (new_bb);
6255   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6256        !gsi_end_p (gsi);
6257        gsi_next (&gsi))
6258     {
6259       def_operand_p def_p;
6260       ssa_op_iter op_iter;
6261       tree lhs;
6262       gimple *stmt, *copy;
6263 
6264       stmt = gsi_stmt (gsi);
6265       if (gimple_code (stmt) == GIMPLE_LABEL)
6266 	continue;
6267 
6268       /* Don't duplicate label debug stmts.  */
6269       if (gimple_debug_bind_p (stmt)
6270 	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
6271 	     == LABEL_DECL)
6272 	continue;
6273 
6274       /* Create a new copy of STMT and duplicate STMT's virtual
6275 	 operands.  */
6276       copy = gimple_copy (stmt);
6277       gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6278 
6279       maybe_duplicate_eh_stmt (copy, stmt);
6280       gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6281 
6282       /* When copying around a stmt writing into a local non-user
6283 	 aggregate, make sure it won't share stack slot with other
6284 	 vars.  */
6285       lhs = gimple_get_lhs (stmt);
6286       if (lhs && TREE_CODE (lhs) != SSA_NAME)
6287 	{
6288 	  tree base = get_base_address (lhs);
6289 	  if (base
6290 	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6291 	      && DECL_IGNORED_P (base)
6292 	      && !TREE_STATIC (base)
6293 	      && !DECL_EXTERNAL (base)
6294 	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6295 	    DECL_NONSHAREABLE (base) = 1;
6296 	}
6297 
6298       /* If requested remap dependence info of cliques brought in
6299          via inlining.  */
6300       if (id)
6301 	for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6302 	  {
6303 	    tree op = gimple_op (copy, i);
6304 	    if (!op)
6305 	      continue;
6306 	    if (TREE_CODE (op) == ADDR_EXPR
6307 		|| TREE_CODE (op) == WITH_SIZE_EXPR)
6308 	      op = TREE_OPERAND (op, 0);
6309 	    while (handled_component_p (op))
6310 	      op = TREE_OPERAND (op, 0);
6311 	    if ((TREE_CODE (op) == MEM_REF
6312 		 || TREE_CODE (op) == TARGET_MEM_REF)
6313 		&& MR_DEPENDENCE_CLIQUE (op) > 1
6314 		&& MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6315 	      {
6316 		if (!id->dependence_map)
6317 		  id->dependence_map = new hash_map<dependence_hash,
6318 						    unsigned short>;
6319 		bool existed;
6320 		unsigned short &newc = id->dependence_map->get_or_insert
6321 		    (MR_DEPENDENCE_CLIQUE (op), &existed);
6322 		if (!existed)
6323 		  {
6324 		    gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6325 		    newc = ++cfun->last_clique;
6326 		  }
6327 		MR_DEPENDENCE_CLIQUE (op) = newc;
6328 	      }
6329 	  }
6330 
6331       /* Create new names for all the definitions created by COPY and
6332 	 add replacement mappings for each new name.  */
6333       FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6334 	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6335     }
6336 
6337   return new_bb;
6338 }
6339 
6340 /* Adds phi node arguments for edge E_COPY after basic block duplication.  */
6341 
6342 static void
add_phi_args_after_copy_edge(edge e_copy)6343 add_phi_args_after_copy_edge (edge e_copy)
6344 {
6345   basic_block bb, bb_copy = e_copy->src, dest;
6346   edge e;
6347   edge_iterator ei;
6348   gphi *phi, *phi_copy;
6349   tree def;
6350   gphi_iterator psi, psi_copy;
6351 
6352   if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6353     return;
6354 
6355   bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6356 
6357   if (e_copy->dest->flags & BB_DUPLICATED)
6358     dest = get_bb_original (e_copy->dest);
6359   else
6360     dest = e_copy->dest;
6361 
6362   e = find_edge (bb, dest);
6363   if (!e)
6364     {
6365       /* During loop unrolling the target of the latch edge is copied.
6366 	 In this case we are not looking for edge to dest, but to
6367 	 duplicated block whose original was dest.  */
6368       FOR_EACH_EDGE (e, ei, bb->succs)
6369 	{
6370 	  if ((e->dest->flags & BB_DUPLICATED)
6371 	      && get_bb_original (e->dest) == dest)
6372 	    break;
6373 	}
6374 
6375       gcc_assert (e != NULL);
6376     }
6377 
6378   for (psi = gsi_start_phis (e->dest),
6379        psi_copy = gsi_start_phis (e_copy->dest);
6380        !gsi_end_p (psi);
6381        gsi_next (&psi), gsi_next (&psi_copy))
6382     {
6383       phi = psi.phi ();
6384       phi_copy = psi_copy.phi ();
6385       def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6386       add_phi_arg (phi_copy, def, e_copy,
6387 		   gimple_phi_arg_location_from_edge (phi, e));
6388     }
6389 }
6390 
6391 
6392 /* Basic block BB_COPY was created by code duplication.  Add phi node
6393    arguments for edges going out of BB_COPY.  The blocks that were
6394    duplicated have BB_DUPLICATED set.  */
6395 
6396 void
add_phi_args_after_copy_bb(basic_block bb_copy)6397 add_phi_args_after_copy_bb (basic_block bb_copy)
6398 {
6399   edge e_copy;
6400   edge_iterator ei;
6401 
6402   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6403     {
6404       add_phi_args_after_copy_edge (e_copy);
6405     }
6406 }
6407 
6408 /* Blocks in REGION_COPY array of length N_REGION were created by
6409    duplication of basic blocks.  Add phi node arguments for edges
6410    going from these blocks.  If E_COPY is not NULL, also add
6411    phi node arguments for its destination.*/
6412 
6413 void
add_phi_args_after_copy(basic_block * region_copy,unsigned n_region,edge e_copy)6414 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6415 			 edge e_copy)
6416 {
6417   unsigned i;
6418 
6419   for (i = 0; i < n_region; i++)
6420     region_copy[i]->flags |= BB_DUPLICATED;
6421 
6422   for (i = 0; i < n_region; i++)
6423     add_phi_args_after_copy_bb (region_copy[i]);
6424   if (e_copy)
6425     add_phi_args_after_copy_edge (e_copy);
6426 
6427   for (i = 0; i < n_region; i++)
6428     region_copy[i]->flags &= ~BB_DUPLICATED;
6429 }
6430 
6431 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6432    important exit edge EXIT.  By important we mean that no SSA name defined
6433    inside region is live over the other exit edges of the region.  All entry
6434    edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
6435    to the duplicate of the region.  Dominance and loop information is
6436    updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
6437    UPDATE_DOMINANCE is false then we assume that the caller will update the
6438    dominance information after calling this function.  The new basic
6439    blocks are stored to REGION_COPY in the same order as they had in REGION,
6440    provided that REGION_COPY is not NULL.
6441    The function returns false if it is unable to copy the region,
6442    true otherwise.  */
6443 
6444 bool
gimple_duplicate_sese_region(edge entry,edge exit,basic_block * region,unsigned n_region,basic_block * region_copy,bool update_dominance)6445 gimple_duplicate_sese_region (edge entry, edge exit,
6446 			    basic_block *region, unsigned n_region,
6447 			    basic_block *region_copy,
6448 			    bool update_dominance)
6449 {
6450   unsigned i;
6451   bool free_region_copy = false, copying_header = false;
6452   class loop *loop = entry->dest->loop_father;
6453   edge exit_copy;
6454   vec<basic_block> doms = vNULL;
6455   edge redirected;
6456   profile_count total_count = profile_count::uninitialized ();
6457   profile_count entry_count = profile_count::uninitialized ();
6458 
6459   if (!can_copy_bbs_p (region, n_region))
6460     return false;
6461 
6462   /* Some sanity checking.  Note that we do not check for all possible
6463      missuses of the functions.  I.e. if you ask to copy something weird,
6464      it will work, but the state of structures probably will not be
6465      correct.  */
6466   for (i = 0; i < n_region; i++)
6467     {
6468       /* We do not handle subloops, i.e. all the blocks must belong to the
6469 	 same loop.  */
6470       if (region[i]->loop_father != loop)
6471 	return false;
6472 
6473       if (region[i] != entry->dest
6474 	  && region[i] == loop->header)
6475 	return false;
6476     }
6477 
6478   /* In case the function is used for loop header copying (which is the primary
6479      use), ensure that EXIT and its copy will be new latch and entry edges.  */
6480   if (loop->header == entry->dest)
6481     {
6482       copying_header = true;
6483 
6484       if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6485 	return false;
6486 
6487       for (i = 0; i < n_region; i++)
6488 	if (region[i] != exit->src
6489 	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6490 	  return false;
6491     }
6492 
6493   initialize_original_copy_tables ();
6494 
6495   if (copying_header)
6496     set_loop_copy (loop, loop_outer (loop));
6497   else
6498     set_loop_copy (loop, loop);
6499 
6500   if (!region_copy)
6501     {
6502       region_copy = XNEWVEC (basic_block, n_region);
6503       free_region_copy = true;
6504     }
6505 
6506   /* Record blocks outside the region that are dominated by something
6507      inside.  */
6508   if (update_dominance)
6509     {
6510       doms.create (0);
6511       doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6512     }
6513 
6514   if (entry->dest->count.initialized_p ())
6515     {
6516       total_count = entry->dest->count;
6517       entry_count = entry->count ();
6518       /* Fix up corner cases, to avoid division by zero or creation of negative
6519 	 frequencies.  */
6520       if (entry_count > total_count)
6521 	entry_count = total_count;
6522     }
6523 
6524   copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6525 	    split_edge_bb_loc (entry), update_dominance);
6526   if (total_count.initialized_p () && entry_count.initialized_p ())
6527     {
6528       scale_bbs_frequencies_profile_count (region, n_region,
6529 				           total_count - entry_count,
6530 				           total_count);
6531       scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6532 				           total_count);
6533     }
6534 
6535   if (copying_header)
6536     {
6537       loop->header = exit->dest;
6538       loop->latch = exit->src;
6539     }
6540 
6541   /* Redirect the entry and add the phi node arguments.  */
6542   redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6543   gcc_assert (redirected != NULL);
6544   flush_pending_stmts (entry);
6545 
6546   /* Concerning updating of dominators:  We must recount dominators
6547      for entry block and its copy.  Anything that is outside of the
6548      region, but was dominated by something inside needs recounting as
6549      well.  */
6550   if (update_dominance)
6551     {
6552       set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6553       doms.safe_push (get_bb_original (entry->dest));
6554       iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6555       doms.release ();
6556     }
6557 
6558   /* Add the other PHI node arguments.  */
6559   add_phi_args_after_copy (region_copy, n_region, NULL);
6560 
6561   if (free_region_copy)
6562     free (region_copy);
6563 
6564   free_original_copy_tables ();
6565   return true;
6566 }
6567 
6568 /* Checks if BB is part of the region defined by N_REGION BBS.  */
6569 static bool
bb_part_of_region_p(basic_block bb,basic_block * bbs,unsigned n_region)6570 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6571 {
6572   unsigned int n;
6573 
6574   for (n = 0; n < n_region; n++)
6575     {
6576      if (bb == bbs[n])
6577        return true;
6578     }
6579   return false;
6580 }
6581 
6582 /* Duplicates REGION consisting of N_REGION blocks.  The new blocks
6583    are stored to REGION_COPY in the same order in that they appear
6584    in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
6585    the region, EXIT an exit from it.  The condition guarding EXIT
6586    is moved to ENTRY.  Returns true if duplication succeeds, false
6587    otherwise.
6588 
6589    For example,
6590 
6591    some_code;
6592    if (cond)
6593      A;
6594    else
6595      B;
6596 
6597    is transformed to
6598 
6599    if (cond)
6600      {
6601        some_code;
6602        A;
6603      }
6604    else
6605      {
6606        some_code;
6607        B;
6608      }
6609 */
6610 
6611 bool
gimple_duplicate_sese_tail(edge entry,edge exit,basic_block * region,unsigned n_region,basic_block * region_copy)6612 gimple_duplicate_sese_tail (edge entry, edge exit,
6613 			  basic_block *region, unsigned n_region,
6614 			  basic_block *region_copy)
6615 {
6616   unsigned i;
6617   bool free_region_copy = false;
6618   class loop *loop = exit->dest->loop_father;
6619   class loop *orig_loop = entry->dest->loop_father;
6620   basic_block switch_bb, entry_bb, nentry_bb;
6621   vec<basic_block> doms;
6622   profile_count total_count = profile_count::uninitialized (),
6623 		exit_count = profile_count::uninitialized ();
6624   edge exits[2], nexits[2], e;
6625   gimple_stmt_iterator gsi;
6626   gimple *cond_stmt;
6627   edge sorig, snew;
6628   basic_block exit_bb;
6629   gphi_iterator psi;
6630   gphi *phi;
6631   tree def;
6632   class loop *target, *aloop, *cloop;
6633 
6634   gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6635   exits[0] = exit;
6636   exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6637 
6638   if (!can_copy_bbs_p (region, n_region))
6639     return false;
6640 
6641   initialize_original_copy_tables ();
6642   set_loop_copy (orig_loop, loop);
6643 
6644   target= loop;
6645   for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6646     {
6647       if (bb_part_of_region_p (aloop->header, region, n_region))
6648 	{
6649 	  cloop = duplicate_loop (aloop, target);
6650 	  duplicate_subloops (aloop, cloop);
6651 	}
6652     }
6653 
6654   if (!region_copy)
6655     {
6656       region_copy = XNEWVEC (basic_block, n_region);
6657       free_region_copy = true;
6658     }
6659 
6660   gcc_assert (!need_ssa_update_p (cfun));
6661 
6662   /* Record blocks outside the region that are dominated by something
6663      inside.  */
6664   doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6665 
6666   total_count = exit->src->count;
6667   exit_count = exit->count ();
6668   /* Fix up corner cases, to avoid division by zero or creation of negative
6669      frequencies.  */
6670   if (exit_count > total_count)
6671     exit_count = total_count;
6672 
6673   copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6674 	    split_edge_bb_loc (exit), true);
6675   if (total_count.initialized_p () && exit_count.initialized_p ())
6676     {
6677       scale_bbs_frequencies_profile_count (region, n_region,
6678 				           total_count - exit_count,
6679 				           total_count);
6680       scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6681 				           total_count);
6682     }
6683 
6684   /* Create the switch block, and put the exit condition to it.  */
6685   entry_bb = entry->dest;
6686   nentry_bb = get_bb_copy (entry_bb);
6687   if (!last_stmt (entry->src)
6688       || !stmt_ends_bb_p (last_stmt (entry->src)))
6689     switch_bb = entry->src;
6690   else
6691     switch_bb = split_edge (entry);
6692   set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6693 
6694   gsi = gsi_last_bb (switch_bb);
6695   cond_stmt = last_stmt (exit->src);
6696   gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6697   cond_stmt = gimple_copy (cond_stmt);
6698 
6699   gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6700 
6701   sorig = single_succ_edge (switch_bb);
6702   sorig->flags = exits[1]->flags;
6703   sorig->probability = exits[1]->probability;
6704   snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6705   snew->probability = exits[0]->probability;
6706 
6707 
6708   /* Register the new edge from SWITCH_BB in loop exit lists.  */
6709   rescan_loop_exit (snew, true, false);
6710 
6711   /* Add the PHI node arguments.  */
6712   add_phi_args_after_copy (region_copy, n_region, snew);
6713 
6714   /* Get rid of now superfluous conditions and associated edges (and phi node
6715      arguments).  */
6716   exit_bb = exit->dest;
6717 
6718   e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6719   PENDING_STMT (e) = NULL;
6720 
6721   /* The latch of ORIG_LOOP was copied, and so was the backedge
6722      to the original header.  We redirect this backedge to EXIT_BB.  */
6723   for (i = 0; i < n_region; i++)
6724     if (get_bb_original (region_copy[i]) == orig_loop->latch)
6725       {
6726 	gcc_assert (single_succ_edge (region_copy[i]));
6727 	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6728 	PENDING_STMT (e) = NULL;
6729 	for (psi = gsi_start_phis (exit_bb);
6730 	     !gsi_end_p (psi);
6731 	     gsi_next (&psi))
6732 	  {
6733 	    phi = psi.phi ();
6734 	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6735 	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6736 	  }
6737       }
6738   e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6739   PENDING_STMT (e) = NULL;
6740 
6741   /* Anything that is outside of the region, but was dominated by something
6742      inside needs to update dominance info.  */
6743   iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6744   doms.release ();
6745   /* Update the SSA web.  */
6746   update_ssa (TODO_update_ssa);
6747 
6748   if (free_region_copy)
6749     free (region_copy);
6750 
6751   free_original_copy_tables ();
6752   return true;
6753 }
6754 
6755 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
6756    adding blocks when the dominator traversal reaches EXIT.  This
6757    function silently assumes that ENTRY strictly dominates EXIT.  */
6758 
6759 void
gather_blocks_in_sese_region(basic_block entry,basic_block exit,vec<basic_block> * bbs_p)6760 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6761 			      vec<basic_block> *bbs_p)
6762 {
6763   basic_block son;
6764 
6765   for (son = first_dom_son (CDI_DOMINATORS, entry);
6766        son;
6767        son = next_dom_son (CDI_DOMINATORS, son))
6768     {
6769       bbs_p->safe_push (son);
6770       if (son != exit)
6771 	gather_blocks_in_sese_region (son, exit, bbs_p);
6772     }
6773 }
6774 
6775 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6776    The duplicates are recorded in VARS_MAP.  */
6777 
6778 static void
replace_by_duplicate_decl(tree * tp,hash_map<tree,tree> * vars_map,tree to_context)6779 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6780 			   tree to_context)
6781 {
6782   tree t = *tp, new_t;
6783   struct function *f = DECL_STRUCT_FUNCTION (to_context);
6784 
6785   if (DECL_CONTEXT (t) == to_context)
6786     return;
6787 
6788   bool existed;
6789   tree &loc = vars_map->get_or_insert (t, &existed);
6790 
6791   if (!existed)
6792     {
6793       if (SSA_VAR_P (t))
6794 	{
6795 	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6796 	  add_local_decl (f, new_t);
6797 	}
6798       else
6799 	{
6800 	  gcc_assert (TREE_CODE (t) == CONST_DECL);
6801 	  new_t = copy_node (t);
6802 	}
6803       DECL_CONTEXT (new_t) = to_context;
6804 
6805       loc = new_t;
6806     }
6807   else
6808     new_t = loc;
6809 
6810   *tp = new_t;
6811 }
6812 
6813 
6814 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6815    VARS_MAP maps old ssa names and var_decls to the new ones.  */
6816 
6817 static tree
replace_ssa_name(tree name,hash_map<tree,tree> * vars_map,tree to_context)6818 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6819 		  tree to_context)
6820 {
6821   tree new_name;
6822 
6823   gcc_assert (!virtual_operand_p (name));
6824 
6825   tree *loc = vars_map->get (name);
6826 
6827   if (!loc)
6828     {
6829       tree decl = SSA_NAME_VAR (name);
6830       if (decl)
6831 	{
6832 	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6833 	  replace_by_duplicate_decl (&decl, vars_map, to_context);
6834 	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6835 				       decl, SSA_NAME_DEF_STMT (name));
6836 	}
6837       else
6838 	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6839 				     name, SSA_NAME_DEF_STMT (name));
6840 
6841       /* Now that we've used the def stmt to define new_name, make sure it
6842 	 doesn't define name anymore.  */
6843       SSA_NAME_DEF_STMT (name) = NULL;
6844 
6845       vars_map->put (name, new_name);
6846     }
6847   else
6848     new_name = *loc;
6849 
6850   return new_name;
6851 }
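
/* For example (illustrative): moving a region that uses i_3, an SSA name
   whose SSA_NAME_VAR is the source function's variable 'i', first duplicates
   'i' into TO_CONTEXT via replace_by_duplicate_decl and then creates a new
   SSA name for the duplicate there; anonymous SSA names are simply copied
   with copy_ssa_name_fn.  Either way the result is cached in VARS_MAP, so
   every use of i_3 in the moved region maps to the same new name.  */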
6852 
6853 struct move_stmt_d
6854 {
6855   tree orig_block;
6856   tree new_block;
6857   tree from_context;
6858   tree to_context;
6859   hash_map<tree, tree> *vars_map;
6860   htab_t new_label_map;
6861   hash_map<void *, void *> *eh_map;
6862   bool remap_decls_p;
6863 };
6864 
6865 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
6866    contained in *TP if it was previously ORIG_BLOCK, and change the
6867    DECL_CONTEXT of every local variable referenced in *TP.  */
6868 
6869 static tree
6870 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6871 {
6872   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6873   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6874   tree t = *tp;
6875 
6876   if (EXPR_P (t))
6877     {
6878       tree block = TREE_BLOCK (t);
6879       if (block == NULL_TREE)
6880 	;
6881       else if (block == p->orig_block
6882 	       || p->orig_block == NULL_TREE)
6883 	{
6884 	  /* tree_node_can_be_shared says we can share invariant
6885 	     addresses but unshare_expr copies them anyway.  Make sure
6886 	     to unshare before adjusting the block in place - we do not
6887 	     always see a copy here.  */
6888 	  if (TREE_CODE (t) == ADDR_EXPR
6889 	      && is_gimple_min_invariant (t))
6890 	    *tp = t = unshare_expr (t);
6891 	  TREE_SET_BLOCK (t, p->new_block);
6892 	}
6893       else if (flag_checking)
6894 	{
6895 	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6896 	    block = BLOCK_SUPERCONTEXT (block);
6897 	  gcc_assert (block == p->orig_block);
6898 	}
6899     }
6900   else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6901     {
6902       if (TREE_CODE (t) == SSA_NAME)
6903 	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
6904       else if (TREE_CODE (t) == PARM_DECL
6905 	       && gimple_in_ssa_p (cfun))
6906 	*tp = *(p->vars_map->get (t));
6907       else if (TREE_CODE (t) == LABEL_DECL)
6908 	{
6909 	  if (p->new_label_map)
6910 	    {
6911 	      struct tree_map in, *out;
6912 	      in.base.from = t;
6913 	      out = (struct tree_map *)
6914 		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6915 	      if (out)
6916 		*tp = t = out->to;
6917 	    }
6918 
6919 	  /* For FORCED_LABELs we can end up with references from other
6920 	     functions if some SESE regions are outlined.  It is UB to
6921 	     jump in between them, but they could be used just for printing
6922 	     addresses etc.  In that case, DECL_CONTEXT on the label should
6923 	     be the function containing the glabel stmt with that LABEL_DECL,
6924 	     rather than whatever function a reference to the label was seen
6925 	     last time.  */
6926 	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6927 	    DECL_CONTEXT (t) = p->to_context;
6928 	}
6929       else if (p->remap_decls_p)
6930 	{
6931 	  /* Replace T with its duplicate.  T should no longer appear in the
6932 	     parent function, so this looks wasteful; however, it may appear
6933 	     in referenced_vars, and more importantly, as virtual operands of
6934 	     statements, and in alias lists of other variables.  It would be
6935 	     quite difficult to expunge it from all those places.  ??? It might
6936 	     suffice to do this for addressable variables.  */
6937 	  if ((VAR_P (t) && !is_global_var (t))
6938 	      || TREE_CODE (t) == CONST_DECL)
6939 	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6940 	}
6941       *walk_subtrees = 0;
6942     }
6943   else if (TYPE_P (t))
6944     *walk_subtrees = 0;
6945 
6946   return NULL_TREE;
6947 }
6948 
6949 /* Helper for move_stmt_r.  Given an EH region number for the source
6950    function, map that to the duplicate EH region number in the dest.  */
6951 
6952 static int
6953 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6954 {
6955   eh_region old_r, new_r;
6956 
6957   old_r = get_eh_region_from_number (old_nr);
6958   new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6959 
6960   return new_r->index;
6961 }
6962 
6963 /* Similar, but operate on INTEGER_CSTs.  */
6964 
6965 static tree
6966 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6967 {
6968   int old_nr, new_nr;
6969 
6970   old_nr = tree_to_shwi (old_t_nr);
6971   new_nr = move_stmt_eh_region_nr (old_nr, p);
6972 
6973   return build_int_cst (integer_type_node, new_nr);
6974 }
6975 
6976 /* Like move_stmt_op, but for gimple statements.
6977 
6978    Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
6979    contained in the current statement in *GSI_P and change the
6980    DECL_CONTEXT of every local variable referenced in the current
6981    statement.  */
6982 
6983 static tree
6984 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6985 	     struct walk_stmt_info *wi)
6986 {
6987   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6988   gimple *stmt = gsi_stmt (*gsi_p);
6989   tree block = gimple_block (stmt);
6990 
6991   if (block == p->orig_block
6992       || (p->orig_block == NULL_TREE
6993 	  && block != NULL_TREE))
6994     gimple_set_block (stmt, p->new_block);
6995 
6996   switch (gimple_code (stmt))
6997     {
6998     case GIMPLE_CALL:
6999       /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
7000       {
7001 	tree r, fndecl = gimple_call_fndecl (stmt);
7002 	if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7003 	  switch (DECL_FUNCTION_CODE (fndecl))
7004 	    {
7005 	    case BUILT_IN_EH_COPY_VALUES:
7006 	      r = gimple_call_arg (stmt, 1);
7007 	      r = move_stmt_eh_region_tree_nr (r, p);
7008 	      gimple_call_set_arg (stmt, 1, r);
7009 	      /* FALLTHRU */
7010 
7011 	    case BUILT_IN_EH_POINTER:
7012 	    case BUILT_IN_EH_FILTER:
7013 	      r = gimple_call_arg (stmt, 0);
7014 	      r = move_stmt_eh_region_tree_nr (r, p);
7015 	      gimple_call_set_arg (stmt, 0, r);
7016 	      break;
7017 
7018 	    default:
7019 	      break;
7020 	    }
7021       }
7022       break;
7023 
7024     case GIMPLE_RESX:
7025       {
7026 	gresx *resx_stmt = as_a <gresx *> (stmt);
7027 	int r = gimple_resx_region (resx_stmt);
7028 	r = move_stmt_eh_region_nr (r, p);
7029 	gimple_resx_set_region (resx_stmt, r);
7030       }
7031       break;
7032 
7033     case GIMPLE_EH_DISPATCH:
7034       {
7035 	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7036 	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7037 	r = move_stmt_eh_region_nr (r, p);
7038 	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7039       }
7040       break;
7041 
7042     case GIMPLE_OMP_RETURN:
7043     case GIMPLE_OMP_CONTINUE:
7044       break;
7045 
7046     case GIMPLE_LABEL:
7047       {
7048 	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7049 	   so that such labels can be referenced from other regions.
7050 	   Make sure to update it when seeing a GIMPLE_LABEL though,
7051 	   that is the owner of the label.  */
7052 	walk_gimple_op (stmt, move_stmt_op, wi);
7053 	*handled_ops_p = true;
7054 	tree label = gimple_label_label (as_a <glabel *> (stmt));
7055 	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7056 	  DECL_CONTEXT (label) = p->to_context;
7057       }
7058       break;
7059 
7060     default:
7061       if (is_gimple_omp (stmt))
7062 	{
7063 	  /* Do not remap variables inside OMP directives.  Variables
7064 	     referenced in clauses and directive header belong to the
7065 	     parent function and should not be moved into the child
7066 	     function.  */
7067 	  bool save_remap_decls_p = p->remap_decls_p;
7068 	  p->remap_decls_p = false;
7069 	  *handled_ops_p = true;
7070 
7071 	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7072 			       move_stmt_op, wi);
7073 
7074 	  p->remap_decls_p = save_remap_decls_p;
7075 	}
7076       break;
7077     }
7078 
7079   return NULL_TREE;
7080 }
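
/* For instance (illustrative region numbers): if EH region 3 of the source
   function was duplicated as region 1 in the destination, a moved call
   __builtin_eh_pointer (3) is rewritten above to __builtin_eh_pointer (1),
   and a GIMPLE_RESX with region 3 becomes a resx with region 1.  */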
7081 
7082 /* Move basic block BB from function CFUN to function DEST_FN.  The
7083    block is moved out of the original linked list and placed after
7084    block AFTER in the new list.  Also, the block is removed from the
7085    original array of blocks and placed in DEST_FN's array of blocks.
7086    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7087    updated to reflect the moved edges.
7088 
7089    The local variables are remapped to new instances, VARS_MAP is used
7090    to record the mapping.  */
7091 
7092 static void
7093 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7094 		  basic_block after, bool update_edge_count_p,
7095 		  struct move_stmt_d *d)
7096 {
7097   struct control_flow_graph *cfg;
7098   edge_iterator ei;
7099   edge e;
7100   gimple_stmt_iterator si;
7101   unsigned old_len, new_len;
7102 
7103   /* Remove BB from dominance structures.  */
7104   delete_from_dominance_info (CDI_DOMINATORS, bb);
7105 
7106   /* Move BB from its current loop to the copy in the new function.  */
7107   if (current_loops)
7108     {
7109       class loop *new_loop = (class loop *)bb->loop_father->aux;
7110       if (new_loop)
7111 	bb->loop_father = new_loop;
7112     }
7113 
7114   /* Link BB to the new linked list.  */
7115   move_block_after (bb, after);
7116 
7117   /* Update the edge count in the corresponding flowgraphs.  */
7118   if (update_edge_count_p)
7119     FOR_EACH_EDGE (e, ei, bb->succs)
7120       {
7121 	cfun->cfg->x_n_edges--;
7122 	dest_cfun->cfg->x_n_edges++;
7123       }
7124 
7125   /* Remove BB from the original basic block array.  */
7126   (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7127   cfun->cfg->x_n_basic_blocks--;
7128 
7129   /* Grow DEST_CFUN's basic block array if needed.  */
7130   cfg = dest_cfun->cfg;
7131   cfg->x_n_basic_blocks++;
7132   if (bb->index >= cfg->x_last_basic_block)
7133     cfg->x_last_basic_block = bb->index + 1;
7134 
7135   old_len = vec_safe_length (cfg->x_basic_block_info);
7136   if ((unsigned) cfg->x_last_basic_block >= old_len)
7137     {
7138       new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7139       vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7140     }
7141 
7142   (*cfg->x_basic_block_info)[bb->index] = bb;
7143 
7144   /* Remap the variables in phi nodes.  */
7145   for (gphi_iterator psi = gsi_start_phis (bb);
7146        !gsi_end_p (psi); )
7147     {
7148       gphi *phi = psi.phi ();
7149       use_operand_p use;
7150       tree op = PHI_RESULT (phi);
7151       ssa_op_iter oi;
7152       unsigned i;
7153 
7154       if (virtual_operand_p (op))
7155 	{
7156 	  /* Remove the phi nodes for virtual operands (alias analysis will be
7157 	     run for the new function, anyway).  But replace all uses that
7158 	     might be outside of the region we move.  */
7159 	  use_operand_p use_p;
7160 	  imm_use_iterator iter;
7161 	  gimple *use_stmt;
7162 	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7163 	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7164 	      SET_USE (use_p, SSA_NAME_VAR (op));
7165           remove_phi_node (&psi, true);
7166 	  continue;
7167 	}
7168 
7169       SET_PHI_RESULT (phi,
7170 		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7171       FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7172 	{
7173 	  op = USE_FROM_PTR (use);
7174 	  if (TREE_CODE (op) == SSA_NAME)
7175 	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7176 	}
7177 
7178       for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7179 	{
7180 	  location_t locus = gimple_phi_arg_location (phi, i);
7181 	  tree block = LOCATION_BLOCK (locus);
7182 
7183 	  if (locus == UNKNOWN_LOCATION)
7184 	    continue;
7185 	  if (d->orig_block == NULL_TREE || block == d->orig_block)
7186 	    {
7187 	      locus = set_block (locus, d->new_block);
7188 	      gimple_phi_arg_set_location (phi, i, locus);
7189 	    }
7190 	}
7191 
7192       gsi_next (&psi);
7193     }
7194 
7195   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7196     {
7197       gimple *stmt = gsi_stmt (si);
7198       struct walk_stmt_info wi;
7199 
7200       memset (&wi, 0, sizeof (wi));
7201       wi.info = d;
7202       walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7203 
7204       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7205 	{
7206 	  tree label = gimple_label_label (label_stmt);
7207 	  int uid = LABEL_DECL_UID (label);
7208 
7209 	  gcc_assert (uid > -1);
7210 
7211 	  old_len = vec_safe_length (cfg->x_label_to_block_map);
7212 	  if (old_len <= (unsigned) uid)
7213 	    {
7214 	      new_len = 3 * uid / 2 + 1;
7215 	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7216 	    }
7217 
7218 	  (*cfg->x_label_to_block_map)[uid] = bb;
7219 	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7220 
7221 	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7222 
7223 	  if (uid >= dest_cfun->cfg->last_label_uid)
7224 	    dest_cfun->cfg->last_label_uid = uid + 1;
7225 	}
7226 
7227       maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7228       remove_stmt_from_eh_lp_fn (cfun, stmt);
7229 
7230       gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7231       gimple_remove_stmt_histograms (cfun, stmt);
7232 
7233       /* We cannot leave any operands allocated from the operand caches of
7234 	 the current function.  */
7235       free_stmt_operands (cfun, stmt);
7236       push_cfun (dest_cfun);
7237       update_stmt (stmt);
7238       pop_cfun ();
7239     }
7240 
7241   FOR_EACH_EDGE (e, ei, bb->succs)
7242     if (e->goto_locus != UNKNOWN_LOCATION)
7243       {
7244 	tree block = LOCATION_BLOCK (e->goto_locus);
7245 	if (d->orig_block == NULL_TREE
7246 	    || block == d->orig_block)
7247 	  e->goto_locus = set_block (e->goto_locus, d->new_block);
7248       }
7249 }
7250 
7251 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7252    the outermost EH region.  Use REGION as the incoming base EH region.
7253    If there is no single outermost region, return NULL and set *ALL to
7254    true.  */
7255 
7256 static eh_region
7257 find_outermost_region_in_block (struct function *src_cfun,
7258 				basic_block bb, eh_region region,
7259 				bool *all)
7260 {
7261   gimple_stmt_iterator si;
7262 
7263   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7264     {
7265       gimple *stmt = gsi_stmt (si);
7266       eh_region stmt_region;
7267       int lp_nr;
7268 
7269       lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7270       stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7271       if (stmt_region)
7272 	{
7273 	  if (region == NULL)
7274 	    region = stmt_region;
7275 	  else if (stmt_region != region)
7276 	    {
7277 	      region = eh_region_outermost (src_cfun, stmt_region, region);
7278 	      if (region == NULL)
7279 		{
7280 		  *all = true;
7281 		  return NULL;
7282 		}
7283 	    }
7284 	}
7285     }
7286 
7287   return region;
7288 }
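
/* E.g. (an illustrative scenario): if BB has a statement in a landing pad
   of region R2 nested inside R1 and REGION is already R1, the result stays
   R1; if the two regions share no common outer region, eh_region_outermost
   returns NULL, *ALL is set, and the caller falls back to duplicating the
   entire EH tree.  */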
7289 
7290 static tree
7291 new_label_mapper (tree decl, void *data)
7292 {
7293   htab_t hash = (htab_t) data;
7294   struct tree_map *m;
7295   void **slot;
7296 
7297   gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7298 
7299   m = XNEW (struct tree_map);
7300   m->hash = DECL_UID (decl);
7301   m->base.from = decl;
7302   m->to = create_artificial_label (UNKNOWN_LOCATION);
7303   LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7304   if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7305     cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7306 
7307   slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7308   gcc_assert (*slot == NULL);
7309 
7310   *slot = m;
7311 
7312   return m->to;
7313 }
7314 
7315 /* Tree walker to replace the decls used inside value expressions by
7316    duplicates.  */
7317 
7318 static tree
7319 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7320 {
7321   struct replace_decls_d *rd = (struct replace_decls_d *)data;
7322 
7323   switch (TREE_CODE (*tp))
7324     {
7325     case VAR_DECL:
7326     case PARM_DECL:
7327     case RESULT_DECL:
7328       replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7329       break;
7330     default:
7331       break;
7332     }
7333 
7334   if (IS_TYPE_OR_DECL_P (*tp))
7335     *walk_subtrees = false;
7336 
7337   return NULL;
7338 }
7339 
7340 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7341    subblocks.  */
7342 
7343 static void
7344 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7345 				  tree to_context)
7346 {
7347   tree *tp, t;
7348 
7349   for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7350     {
7351       t = *tp;
7352       if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7353 	continue;
7354       replace_by_duplicate_decl (&t, vars_map, to_context);
7355       if (t != *tp)
7356 	{
7357 	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7358 	    {
7359 	      tree x = DECL_VALUE_EXPR (*tp);
7360 	      struct replace_decls_d rd = { vars_map, to_context };
7361 	      unshare_expr (x);
7362 	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7363 	      SET_DECL_VALUE_EXPR (t, x);
7364 	      DECL_HAS_VALUE_EXPR_P (t) = 1;
7365 	    }
7366 	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
7367 	  *tp = t;
7368 	}
7369     }
7370 
7371   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7372     replace_block_vars_by_duplicates (block, vars_map, to_context);
7373 }
7374 
7375 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7376    from FN1 to FN2.  */
7377 
7378 static void
7379 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7380 			      class loop *loop)
7381 {
7382   /* Discard it from the old loop array.  */
7383   (*get_loops (fn1))[loop->num] = NULL;
7384 
7385   /* Place it in the new loop array, assigning it a new number.  */
7386   loop->num = number_of_loops (fn2);
7387   vec_safe_push (loops_for_fn (fn2)->larray, loop);
7388 
7389   /* Recurse to children.  */
7390   for (loop = loop->inner; loop; loop = loop->next)
7391     fixup_loop_arrays_after_move (fn1, fn2, loop);
7392 }
7393 
7394 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7395    delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.  */
7396 
7397 DEBUG_FUNCTION void
7398 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7399 {
7400   basic_block bb;
7401   edge_iterator ei;
7402   edge e;
7403   bitmap bbs = BITMAP_ALLOC (NULL);
7404   int i;
7405 
7406   gcc_assert (entry != NULL);
7407   gcc_assert (entry != exit);
7408   gcc_assert (bbs_p != NULL);
7409 
7410   gcc_assert (bbs_p->length () > 0);
7411 
7412   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7413     bitmap_set_bit (bbs, bb->index);
7414 
7415   gcc_assert (bitmap_bit_p (bbs, entry->index));
7416   gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7417 
7418   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7419     {
7420       if (bb == entry)
7421 	{
7422 	  gcc_assert (single_pred_p (entry));
7423 	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7424 	}
7425       else
7426 	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7427 	  {
7428 	    e = ei_edge (ei);
7429 	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
7430 	  }
7431 
7432       if (bb == exit)
7433 	{
7434 	  gcc_assert (single_succ_p (exit));
7435 	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7436 	}
7437       else
7438 	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7439 	  {
7440 	    e = ei_edge (ei);
7441 	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7442 	  }
7443     }
7444 
7445   BITMAP_FREE (bbs);
7446 }
7447 
7448 /* If FROM is an SSA_NAME, mark the version in bitmap DATA.  */
7449 
7450 bool
7451 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7452 {
7453   bitmap release_names = (bitmap)data;
7454 
7455   if (TREE_CODE (from) != SSA_NAME)
7456     return true;
7457 
7458   bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7459   return true;
7460 }
7461 
7462 /* Return LOOP_DIST_ALIAS call if present in BB.  */
7463 
7464 static gimple *
7465 find_loop_dist_alias (basic_block bb)
7466 {
7467   gimple *g = last_stmt (bb);
7468   if (g == NULL || gimple_code (g) != GIMPLE_COND)
7469     return NULL;
7470 
7471   gimple_stmt_iterator gsi = gsi_for_stmt (g);
7472   gsi_prev (&gsi);
7473   if (gsi_end_p (gsi))
7474     return NULL;
7475 
7476   g = gsi_stmt (gsi);
7477   if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7478     return g;
7479   return NULL;
7480 }
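
/* The shape recognized above is (an illustrative sketch; exact SSA names
   and block layout vary):

     _5 = .LOOP_DIST_ALIAS (orig_loop_num, fallback_value);
     if (_5 != 0)
       goto <bb of one loop version>;
     else
       goto <bb of the other version>;

   i.e. the internal call must be the statement immediately preceding the
   final GIMPLE_COND of BB.  */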
7481 
7482 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7483    to VALUE and update any immediate uses of its LHS.  */
7484 
7485 void
7486 fold_loop_internal_call (gimple *g, tree value)
7487 {
7488   tree lhs = gimple_call_lhs (g);
7489   use_operand_p use_p;
7490   imm_use_iterator iter;
7491   gimple *use_stmt;
7492   gimple_stmt_iterator gsi = gsi_for_stmt (g);
7493 
7494   update_call_from_tree (&gsi, value);
7495   FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7496     {
7497       FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7498 	SET_USE (use_p, value);
7499       update_stmt (use_stmt);
7500     }
7501 }
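
/* Illustrative use, matching the callers further below: when only one loop
   of a distributed pair survives in this function, the call is folded to
   its second argument,

     fold_loop_internal_call (g, gimple_call_arg (g, 1));

   which turns the guarding GIMPLE_COND into a constant test so that CFG
   cleanup can later remove the now-dead loop version.  */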
7502 
7503 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7504    EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
7505    single basic block in the original CFG and the new basic block is
7506    returned.  DEST_CFUN must not have a CFG yet.
7507 
7508    Note that the region need not be a pure SESE region.  Blocks inside
7509    the region may contain calls to abort/exit.  The only restriction
7510    is that ENTRY_BB should be the only entry point and it must
7511    dominate EXIT_BB.
7512 
7513    Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7514    function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7515    to the new function.
7516 
7517    All local variables referenced in the region are assumed to be in
7518    the corresponding BLOCK_VARS and unexpanded variable lists
7519    associated with DEST_CFUN.
7520 
7521    TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7522    reimplement move_sese_region_to_fn by duplicating the region rather than
7523    moving it.  */
7524 
7525 basic_block
7526 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7527 		        basic_block exit_bb, tree orig_block)
7528 {
7529   vec<basic_block> bbs, dom_bbs;
7530   basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7531   basic_block after, bb, *entry_pred, *exit_succ, abb;
7532   struct function *saved_cfun = cfun;
7533   int *entry_flag, *exit_flag;
7534   profile_probability *entry_prob, *exit_prob;
7535   unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7536   edge e;
7537   edge_iterator ei;
7538   htab_t new_label_map;
7539   hash_map<void *, void *> *eh_map;
7540   class loop *loop = entry_bb->loop_father;
7541   class loop *loop0 = get_loop (saved_cfun, 0);
7542   struct move_stmt_d d;
7543 
7544   /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7545      region.  */
7546   gcc_assert (entry_bb != exit_bb
7547               && (!exit_bb
7548 		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7549 
7550   /* Collect all the blocks in the region.  Manually add ENTRY_BB
7551      because it won't be added by dfs_enumerate_from.  */
7552   bbs.create (0);
7553   bbs.safe_push (entry_bb);
7554   gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7555 
7556   if (flag_checking)
7557     verify_sese (entry_bb, exit_bb, &bbs);
7558 
7559   /* The blocks that used to be dominated by something in BBS will now be
7560      dominated by the new block.  */
7561   dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7562 				     bbs.address (),
7563 				     bbs.length ());
7564 
7565   /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
7566      the predecessor edges to ENTRY_BB and the successor edges to
7567      EXIT_BB so that we can re-attach them to the new basic block that
7568      will replace the region.  */
7569   num_entry_edges = EDGE_COUNT (entry_bb->preds);
7570   entry_pred = XNEWVEC (basic_block, num_entry_edges);
7571   entry_flag = XNEWVEC (int, num_entry_edges);
7572   entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7573   i = 0;
7574   for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7575     {
7576       entry_prob[i] = e->probability;
7577       entry_flag[i] = e->flags;
7578       entry_pred[i++] = e->src;
7579       remove_edge (e);
7580     }
7581 
7582   if (exit_bb)
7583     {
7584       num_exit_edges = EDGE_COUNT (exit_bb->succs);
7585       exit_succ = XNEWVEC (basic_block, num_exit_edges);
7586       exit_flag = XNEWVEC (int, num_exit_edges);
7587       exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7588       i = 0;
7589       for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7590 	{
7591 	  exit_prob[i] = e->probability;
7592 	  exit_flag[i] = e->flags;
7593 	  exit_succ[i++] = e->dest;
7594 	  remove_edge (e);
7595 	}
7596     }
7597   else
7598     {
7599       num_exit_edges = 0;
7600       exit_succ = NULL;
7601       exit_flag = NULL;
7602       exit_prob = NULL;
7603     }
7604 
7605   /* Switch context to the child function to initialize DEST_FN's CFG.  */
7606   gcc_assert (dest_cfun->cfg == NULL);
7607   push_cfun (dest_cfun);
7608 
7609   init_empty_tree_cfg ();
7610 
7611   /* Initialize EH information for the new function.  */
7612   eh_map = NULL;
7613   new_label_map = NULL;
7614   if (saved_cfun->eh)
7615     {
7616       eh_region region = NULL;
7617       bool all = false;
7618 
7619       FOR_EACH_VEC_ELT (bbs, i, bb)
7620 	{
7621 	  region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7622 	  if (all)
7623 	    break;
7624 	}
7625 
7626       init_eh_for_function ();
7627       if (region != NULL || all)
7628 	{
7629 	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7630 	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7631 					 new_label_mapper, new_label_map);
7632 	}
7633     }
7634 
7635   /* Initialize an empty loop tree.  */
7636   struct loops *loops = ggc_cleared_alloc<struct loops> ();
7637   init_loops_structure (dest_cfun, loops, 1);
7638   loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7639   set_loops_for_fn (dest_cfun, loops);
7640 
7641   vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7642 
7643   /* Move the outlined loop tree part.  */
7644   num_nodes = bbs.length ();
7645   FOR_EACH_VEC_ELT (bbs, i, bb)
7646     {
7647       if (bb->loop_father->header == bb)
7648 	{
7649 	  class loop *this_loop = bb->loop_father;
7650 	  class loop *outer = loop_outer (this_loop);
7651 	  if (outer == loop
7652 	      /* If the SESE region contains some bbs ending with
7653 		 a noreturn call, those are considered to belong
7654 		 to the outermost loop in saved_cfun, rather than
7655 		 the entry_bb's loop_father.  */
7656 	      || outer == loop0)
7657 	    {
7658 	      if (outer != loop)
7659 		num_nodes -= this_loop->num_nodes;
7660 	      flow_loop_tree_node_remove (bb->loop_father);
7661 	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7662 	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7663 	    }
7664 	}
7665       else if (bb->loop_father == loop0 && loop0 != loop)
7666 	num_nodes--;
7667 
7668       /* Remove loop exits from the outlined region.  */
7669       if (loops_for_fn (saved_cfun)->exits)
7670 	FOR_EACH_EDGE (e, ei, bb->succs)
7671 	  {
7672 	    struct loops *l = loops_for_fn (saved_cfun);
7673 	    loop_exit **slot
7674 	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7675 					       NO_INSERT);
7676 	    if (slot)
7677 	      l->exits->clear_slot (slot);
7678 	  }
7679     }
7680 
7681   /* Adjust the number of blocks in the tree root of the outlined part.  */
7682   get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7683 
7684   /* Setup a mapping to be used by move_block_to_fn.  */
7685   loop->aux = current_loops->tree_root;
7686   loop0->aux = current_loops->tree_root;
7687 
7688   /* Fix up orig_loop_num.  If the block referenced in it has been moved
7689      to dest_cfun, update orig_loop_num field, otherwise clear it.  */
7690   class loop *dloop;
7691   signed char *moved_orig_loop_num = NULL;
7692   FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7693     if (dloop->orig_loop_num)
7694       {
7695 	if (moved_orig_loop_num == NULL)
7696 	  moved_orig_loop_num
7697 	    = XCNEWVEC (signed char, vec_safe_length (larray));
7698 	if ((*larray)[dloop->orig_loop_num] != NULL
7699 	    && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7700 	  {
7701 	    if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7702 		&& moved_orig_loop_num[dloop->orig_loop_num] < 2)
7703 	      moved_orig_loop_num[dloop->orig_loop_num]++;
7704 	    dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7705 	  }
7706 	else
7707 	  {
7708 	    moved_orig_loop_num[dloop->orig_loop_num] = -1;
7709 	    dloop->orig_loop_num = 0;
7710 	  }
7711       }
7712   pop_cfun ();
7713 
7714   if (moved_orig_loop_num)
7715     {
7716       FOR_EACH_VEC_ELT (bbs, i, bb)
7717 	{
7718 	  gimple *g = find_loop_dist_alias (bb);
7719 	  if (g == NULL)
7720 	    continue;
7721 
7722 	  int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7723 	  gcc_assert (orig_loop_num
7724 		      && (unsigned) orig_loop_num < vec_safe_length (larray));
7725 	  if (moved_orig_loop_num[orig_loop_num] == 2)
7726 	    {
7727 	      /* If we have moved both loops with this orig_loop_num into
7728 		 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7729 		 too, update the first argument.  */
7730 	      gcc_assert ((*larray)[orig_loop_num] != NULL
7731 			  && (get_loop (saved_cfun, orig_loop_num)
7732 			      == NULL));
7733 	      tree t = build_int_cst (integer_type_node,
7734 				      (*larray)[orig_loop_num]->num);
7735 	      gimple_call_set_arg (g, 0, t);
7736 	      update_stmt (g);
7737 	      /* Make sure the following loop will not update it.  */
7738 	      moved_orig_loop_num[orig_loop_num] = 0;
7739 	    }
7740 	  else
7741 	    /* Otherwise at least one of the loops stayed in saved_cfun.
7742 	       Remove the LOOP_DIST_ALIAS call.  */
7743 	    fold_loop_internal_call (g, gimple_call_arg (g, 1));
7744 	}
7745       FOR_EACH_BB_FN (bb, saved_cfun)
7746 	{
7747 	  gimple *g = find_loop_dist_alias (bb);
7748 	  if (g == NULL)
7749 	    continue;
7750 	  int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7751 	  gcc_assert (orig_loop_num
7752 		      && (unsigned) orig_loop_num < vec_safe_length (larray));
7753 	  if (moved_orig_loop_num[orig_loop_num])
7754 	    /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7755 	       of the corresponding loops was moved, remove it.  */
7756 	    fold_loop_internal_call (g, gimple_call_arg (g, 1));
7757 	}
7758       XDELETEVEC (moved_orig_loop_num);
7759     }
7760   ggc_free (larray);
7761 
7762   /* Move blocks from BBS into DEST_CFUN.  */
7763   gcc_assert (bbs.length () >= 2);
7764   after = dest_cfun->cfg->x_entry_block_ptr;
7765   hash_map<tree, tree> vars_map;
7766 
7767   memset (&d, 0, sizeof (d));
7768   d.orig_block = orig_block;
7769   d.new_block = DECL_INITIAL (dest_cfun->decl);
7770   d.from_context = cfun->decl;
7771   d.to_context = dest_cfun->decl;
7772   d.vars_map = &vars_map;
7773   d.new_label_map = new_label_map;
7774   d.eh_map = eh_map;
7775   d.remap_decls_p = true;
7776 
7777   if (gimple_in_ssa_p (cfun))
7778     for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7779       {
7780 	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7781 	set_ssa_default_def (dest_cfun, arg, narg);
7782 	vars_map.put (arg, narg);
7783       }
7784 
7785   FOR_EACH_VEC_ELT (bbs, i, bb)
7786     {
7787       /* No need to update edge counts on the last block.  It has
7788 	 already been updated earlier when we detached the region from
7789 	 the original CFG.  */
7790       move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7791       after = bb;
7792     }
7793 
7794   /* Adjust the maximum clique used.  */
7795   dest_cfun->last_clique = saved_cfun->last_clique;
7796 
7797   loop->aux = NULL;
7798   loop0->aux = NULL;
7799   /* Loop sizes are no longer correct, fix them up.  */
7800   loop->num_nodes -= num_nodes;
7801   for (class loop *outer = loop_outer (loop);
7802        outer; outer = loop_outer (outer))
7803     outer->num_nodes -= num_nodes;
7804   loop0->num_nodes -= bbs.length () - num_nodes;
7805 
7806   if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7807     {
7808       class loop *aloop;
7809       for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7810 	if (aloop != NULL)
7811 	  {
7812 	    if (aloop->simduid)
7813 	      {
7814 		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7815 					   d.to_context);
7816 		dest_cfun->has_simduid_loops = true;
7817 	      }
7818 	    if (aloop->force_vectorize)
7819 	      dest_cfun->has_force_vectorize_loops = true;
7820 	  }
7821     }
7822 
7823   /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
7824   if (orig_block)
7825     {
7826       tree block;
7827       gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7828 		  == NULL_TREE);
7829       BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7830 	= BLOCK_SUBBLOCKS (orig_block);
7831       for (block = BLOCK_SUBBLOCKS (orig_block);
7832 	   block; block = BLOCK_CHAIN (block))
7833 	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7834       BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7835     }
7836 
7837   replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7838 				    &vars_map, dest_cfun->decl);
7839 
7840   if (new_label_map)
7841     htab_delete (new_label_map);
7842   if (eh_map)
7843     delete eh_map;
7844 
7845   if (gimple_in_ssa_p (cfun))
7846     {
7847       /* We need to release ssa-names in a defined order, so first find them,
7848 	 and then iterate in ascending version order.  */
7849       bitmap release_names = BITMAP_ALLOC (NULL);
7850       vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7851       bitmap_iterator bi;
7852       unsigned i;
7853       EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7854 	release_ssa_name (ssa_name (i));
7855       BITMAP_FREE (release_names);
7856     }
7857 
7858   /* Rewire the entry and exit blocks.  The successor to the entry
7859      block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7860      the child function.  Similarly, the predecessor of DEST_FN's
7861      EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
7862      need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7863      various CFG manipulation function get to the right CFG.
7864 
7865      FIXME, this is silly.  The CFG ought to become a parameter to
7866      these helpers.  */
7867   push_cfun (dest_cfun);
7868   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7869   make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7870   if (exit_bb)
7871     {
7872       make_single_succ_edge (exit_bb,  EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7873       EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7874     }
7875   else
7876     EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7877   pop_cfun ();
7878 
7879   /* Back in the original function, the SESE region has disappeared,
7880      create a new basic block in its place.  */
7881   bb = create_empty_bb (entry_pred[0]);
7882   if (current_loops)
7883     add_bb_to_loop (bb, loop);
7884   for (i = 0; i < num_entry_edges; i++)
7885     {
7886       e = make_edge (entry_pred[i], bb, entry_flag[i]);
7887       e->probability = entry_prob[i];
7888     }
7889 
7890   for (i = 0; i < num_exit_edges; i++)
7891     {
7892       e = make_edge (bb, exit_succ[i], exit_flag[i]);
7893       e->probability = exit_prob[i];
7894     }
7895 
7896   set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7897   FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7898     set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7899   dom_bbs.release ();
7900 
7901   if (exit_bb)
7902     {
7903       free (exit_prob);
7904       free (exit_flag);
7905       free (exit_succ);
7906     }
7907   free (entry_prob);
7908   free (entry_flag);
7909   free (entry_pred);
7910   bbs.release ();
7911 
7912   return bb;
7913 }
7914 
7915 /* Dump default def DEF to file FILE using FLAGS and indentation
7916    SPC.  */
7917 
7918 static void
7919 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7920 {
7921   for (int i = 0; i < spc; ++i)
7922     fprintf (file, " ");
7923   dump_ssaname_info_to_file (file, def, spc);
7924 
7925   print_generic_expr (file, TREE_TYPE (def), flags);
7926   fprintf (file, " ");
7927   print_generic_expr (file, def, flags);
7928   fprintf (file, " = ");
7929   print_generic_expr (file, SSA_NAME_VAR (def), flags);
7930   fprintf (file, ";\n");
7931 }
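
/* For a parameter 'n' of type int this prints something like
   (illustrative):

     int n_1(D) = n;

   preceded by any SSA name info emitted by dump_ssaname_info_to_file.  */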
7932 
7933 /* Print no_sanitize attribute to FILE for a given attribute VALUE.  */
7934 
7935 static void
7936 print_no_sanitize_attr_value (FILE *file, tree value)
7937 {
7938   unsigned int flags = tree_to_uhwi (value);
7939   bool first = true;
7940   for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7941     {
7942       if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7943 	{
7944 	  if (!first)
7945 	    fprintf (file, " | ");
7946 	  fprintf (file, "%s", sanitizer_opts[i].name);
7947 	  first = false;
7948 	}
7949     }
7950 }
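
/* E.g. (illustrative): a VALUE combining the address and undefined
   sanitizer flags is printed as:  address | undefined  */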
7951 
7952 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
7953    dumpfile.h).  */
7954 
7955 void
7956 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7957 {
7958   tree arg, var, old_current_fndecl = current_function_decl;
7959   struct function *dsf;
7960   bool ignore_topmost_bind = false, any_var = false;
7961   basic_block bb;
7962   tree chain;
7963   bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7964 		  && decl_is_tm_clone (fndecl));
7965   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7966 
7967   if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7968     {
7969       fprintf (file, "__attribute__((");
7970 
7971       bool first = true;
7972       tree chain;
7973       for (chain = DECL_ATTRIBUTES (fndecl); chain;
7974 	   first = false, chain = TREE_CHAIN (chain))
7975 	{
7976 	  if (!first)
7977 	    fprintf (file, ", ");
7978 
7979 	  tree name = get_attribute_name (chain);
7980 	  print_generic_expr (file, name, dump_flags);
7981 	  if (TREE_VALUE (chain) != NULL_TREE)
7982 	    {
7983 	      fprintf (file, " (");
7984 
7985 	      if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7986 		print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7987 	      else
7988 		print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7989 	      fprintf (file, ")");
7990 	    }
7991 	}
7992 
7993       fprintf (file, "))\n");
7994     }
7995 
7996   current_function_decl = fndecl;
7997   if (flags & TDF_GIMPLE)
7998     {
7999       static bool hotness_bb_param_printed = false;
8000       if (profile_info != NULL
8001 	  && !hotness_bb_param_printed)
8002 	{
8003 	  hotness_bb_param_printed = true;
8004 	  fprintf (file,
8005 		   "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8006 		   " */\n", get_hot_bb_threshold ());
8007 	}
8008 
8009       print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8010 			  dump_flags | TDF_SLIM);
8011       fprintf (file, " __GIMPLE (%s",
8012 	       (fun->curr_properties & PROP_ssa) ? "ssa"
8013 	       : (fun->curr_properties & PROP_cfg) ? "cfg"
8014 	       : "");
8015 
8016       if (cfun->cfg)
8017 	{
8018 	  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8019 	  if (bb->count.initialized_p ())
8020 	    fprintf (file, ",%s(%d)",
8021 		     profile_quality_as_string (bb->count.quality ()),
8022 		     bb->count.value ());
8023 	  fprintf (file, ")\n%s (", function_name (fun));
8024 	}
8025     }
8026   else
8027     fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
8028 
8029   arg = DECL_ARGUMENTS (fndecl);
8030   while (arg)
8031     {
8032       print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8033       fprintf (file, " ");
8034       print_generic_expr (file, arg, dump_flags);
8035       if (DECL_CHAIN (arg))
8036 	fprintf (file, ", ");
8037       arg = DECL_CHAIN (arg);
8038     }
8039   fprintf (file, ")\n");
8040 
8041   dsf = DECL_STRUCT_FUNCTION (fndecl);
8042   if (dsf && (flags & TDF_EH))
8043     dump_eh_tree (file, dsf);
8044 
8045   if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8046     {
8047       dump_node (fndecl, TDF_SLIM | flags, file);
8048       current_function_decl = old_current_fndecl;
8049       return;
8050     }
8051 
8052   /* When GIMPLE is lowered, the variables are no longer available in
8053      BIND_EXPRs, so display them separately.  */
8054   if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8055     {
8056       unsigned ix;
8057       ignore_topmost_bind = true;
8058 
8059       fprintf (file, "{\n");
8060       if (gimple_in_ssa_p (fun)
8061 	  && (flags & TDF_ALIAS))
8062 	{
8063 	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8064 	       arg = DECL_CHAIN (arg))
8065 	    {
8066 	      tree def = ssa_default_def (fun, arg);
8067 	      if (def)
8068 		dump_default_def (file, def, 2, flags);
8069 	    }
8070 
8071 	  tree res = DECL_RESULT (fun->decl);
8072 	  if (res != NULL_TREE
8073 	      && DECL_BY_REFERENCE (res))
8074 	    {
8075 	      tree def = ssa_default_def (fun, res);
8076 	      if (def)
8077 		dump_default_def (file, def, 2, flags);
8078 	    }
8079 
8080 	  tree static_chain = fun->static_chain_decl;
8081 	  if (static_chain != NULL_TREE)
8082 	    {
8083 	      tree def = ssa_default_def (fun, static_chain);
8084 	      if (def)
8085 		dump_default_def (file, def, 2, flags);
8086 	    }
8087 	}
8088 
8089       if (!vec_safe_is_empty (fun->local_decls))
8090 	FOR_EACH_LOCAL_DECL (fun, ix, var)
8091 	  {
8092 	    print_generic_decl (file, var, flags);
8093 	    fprintf (file, "\n");
8094 
8095 	    any_var = true;
8096 	  }
8097 
8098       tree name;
8099 
8100       if (gimple_in_ssa_p (cfun))
8101 	FOR_EACH_SSA_NAME (ix, name, cfun)
8102 	  {
8103 	    if (!SSA_NAME_VAR (name))
8104 	      {
8105 		fprintf (file, "  ");
8106 		print_generic_expr (file, TREE_TYPE (name), flags);
8107 		fprintf (file, " ");
8108 		print_generic_expr (file, name, flags);
8109 		fprintf (file, ";\n");
8110 
8111 		any_var = true;
8112 	      }
8113 	  }
8114     }
8115 
8116   if (fun && fun->decl == fndecl
8117       && fun->cfg
8118       && basic_block_info_for_fn (fun))
8119     {
8120       /* If the CFG has been built, emit a CFG-based dump.  */
8121       if (!ignore_topmost_bind)
8122 	fprintf (file, "{\n");
8123 
8124       if (any_var && n_basic_blocks_for_fn (fun))
8125 	fprintf (file, "\n");
8126 
8127       FOR_EACH_BB_FN (bb, fun)
8128 	dump_bb (file, bb, 2, flags);
8129 
8130       fprintf (file, "}\n");
8131     }
8132   else if (fun->curr_properties & PROP_gimple_any)
8133     {
8134       /* The function is now in GIMPLE form but the CFG has not been
8135 	 built yet.  Emit the single sequence of GIMPLE statements
8136 	 that make up its body.  */
8137       gimple_seq body = gimple_body (fndecl);
8138 
8139       if (gimple_seq_first_stmt (body)
8140 	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8141 	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8142 	print_gimple_seq (file, body, 0, flags);
8143       else
8144 	{
8145 	  if (!ignore_topmost_bind)
8146 	    fprintf (file, "{\n");
8147 
8148 	  if (any_var)
8149 	    fprintf (file, "\n");
8150 
8151 	  print_gimple_seq (file, body, 2, flags);
8152 	  fprintf (file, "}\n");
8153 	}
8154     }
8155   else
8156     {
8157       int indent;
8158 
8159       /* Make a tree based dump.  */
8160       chain = DECL_SAVED_TREE (fndecl);
8161       if (chain && TREE_CODE (chain) == BIND_EXPR)
8162 	{
8163 	  if (ignore_topmost_bind)
8164 	    {
8165 	      chain = BIND_EXPR_BODY (chain);
8166 	      indent = 2;
8167 	    }
8168 	  else
8169 	    indent = 0;
8170 	}
8171       else
8172 	{
8173 	  if (!ignore_topmost_bind)
8174 	    {
8175 	      fprintf (file, "{\n");
8176 	      /* No topmost bind, pretend it's ignored for later.  */
8177 	      ignore_topmost_bind = true;
8178 	    }
8179 	  indent = 2;
8180 	}
8181 
8182       if (any_var)
8183 	fprintf (file, "\n");
8184 
8185       print_generic_stmt_indented (file, chain, flags, indent);
8186       if (ignore_topmost_bind)
8187 	fprintf (file, "}\n");
8188     }
8189 
8190   if (flags & TDF_ENUMERATE_LOCALS)
8191     dump_enumerated_decls (file, flags);
8192   fprintf (file, "\n\n");
8193 
8194   current_function_decl = old_current_fndecl;
8195 }
8196 
8197 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */
8198 
8199 DEBUG_FUNCTION void
8200 debug_function (tree fn, dump_flags_t flags)
8201 {
8202   dump_function_to_file (fn, stderr, flags);
8203 }
8204 
8205 
8206 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
8207 
8208 static void
8209 print_pred_bbs (FILE *file, basic_block bb)
8210 {
8211   edge e;
8212   edge_iterator ei;
8213 
8214   FOR_EACH_EDGE (e, ei, bb->preds)
8215     fprintf (file, "bb_%d ", e->src->index);
8216 }
8217 
8218 
8219 /* Print on FILE the indexes for the successors of basic_block BB.  */
8220 
8221 static void
8222 print_succ_bbs (FILE *file, basic_block bb)
8223 {
8224   edge e;
8225   edge_iterator ei;
8226 
8227   FOR_EACH_EDGE (e, ei, bb->succs)
8228     fprintf (file, "bb_%d ", e->dest->index);
8229 }
8230 
8231 /* Print to FILE the basic block BB following the VERBOSITY level.  */
8232 
8233 void
8234 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8235 {
8236   char *s_indent = (char *) alloca ((size_t) indent + 1);
8237   memset ((void *) s_indent, ' ', (size_t) indent);
8238   s_indent[indent] = '\0';
8239 
8240   /* Print basic_block's header.  */
8241   if (verbosity >= 2)
8242     {
8243       fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
8244       print_pred_bbs (file, bb);
8245       fprintf (file, "}, succs = {");
8246       print_succ_bbs (file, bb);
8247       fprintf (file, "})\n");
8248     }
8249 
8250   /* Print basic_block's body.  */
8251   if (verbosity >= 3)
8252     {
8253       fprintf (file, "%s  {\n", s_indent);
8254       dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8255       fprintf (file, "%s  }\n", s_indent);
8256     }
8257 }
8258 
8259 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8260 
8261 /* Pretty print LOOP on FILE, indented INDENT spaces.  Following
8262    VERBOSITY level this outputs the contents of the loop, or just its
8263    structure.  */
8264 
8265 static void
8266 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8267 {
8268   char *s_indent;
8269   basic_block bb;
8270 
8271   if (loop == NULL)
8272     return;
8273 
8274   s_indent = (char *) alloca ((size_t) indent + 1);
8275   memset ((void *) s_indent, ' ', (size_t) indent);
8276   s_indent[indent] = '\0';
8277 
8278   /* Print loop's header.  */
8279   fprintf (file, "%sloop_%d (", s_indent, loop->num);
8280   if (loop->header)
8281     fprintf (file, "header = %d", loop->header->index);
8282   else
8283     {
8284       fprintf (file, "deleted)\n");
8285       return;
8286     }
8287   if (loop->latch)
8288     fprintf (file, ", latch = %d", loop->latch->index);
8289   else
8290     fprintf (file, ", multiple latches");
8291   fprintf (file, ", niter = ");
8292   print_generic_expr (file, loop->nb_iterations);
8293 
8294   if (loop->any_upper_bound)
8295     {
8296       fprintf (file, ", upper_bound = ");
8297       print_decu (loop->nb_iterations_upper_bound, file);
8298     }
8299   if (loop->any_likely_upper_bound)
8300     {
8301       fprintf (file, ", likely_upper_bound = ");
8302       print_decu (loop->nb_iterations_likely_upper_bound, file);
8303     }
8304 
8305   if (loop->any_estimate)
8306     {
8307       fprintf (file, ", estimate = ");
8308       print_decu (loop->nb_iterations_estimate, file);
8309     }
8310   if (loop->unroll)
8311     fprintf (file, ", unroll = %d", loop->unroll);
8312   fprintf (file, ")\n");
8313 
8314   /* Print loop's body.  */
8315   if (verbosity >= 1)
8316     {
8317       fprintf (file, "%s{\n", s_indent);
8318       FOR_EACH_BB_FN (bb, cfun)
8319 	if (bb->loop_father == loop)
8320 	  print_loops_bb (file, bb, indent, verbosity);
8321 
8322       print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8323       fprintf (file, "%s}\n", s_indent);
8324     }
8325 }
8326 
8327 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8328    spaces.  Following VERBOSITY level this outputs the contents of the
8329    loop, or just its structure.  */
8330 
8331 static void
8332 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8333 			 int verbosity)
8334 {
8335   if (loop == NULL)
8336     return;
8337 
8338   print_loop (file, loop, indent, verbosity);
8339   print_loop_and_siblings (file, loop->next, indent, verbosity);
8340 }
8341 
8342 /* Follow a CFG edge from the entry point of the program, and on entry
8343    of a loop, pretty print the loop structure on FILE.  */
8344 
8345 void
8346 print_loops (FILE *file, int verbosity)
8347 {
8348   basic_block bb;
8349 
8350   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8351   fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8352   if (bb && bb->loop_father)
8353     print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8354 }
8355 
8356 /* Dump a loop.  */
8357 
8358 DEBUG_FUNCTION void
8359 debug (class loop &ref)
8360 {
8361   print_loop (stderr, &ref, 0, /*verbosity*/0);
8362 }
8363 
8364 DEBUG_FUNCTION void
8365 debug (class loop *ptr)
8366 {
8367   if (ptr)
8368     debug (*ptr);
8369   else
8370     fprintf (stderr, "<nil>\n");
8371 }
8372 
8373 /* Dump a loop verbosely.  */
8374 
8375 DEBUG_FUNCTION void
8376 debug_verbose (class loop &ref)
8377 {
8378   print_loop (stderr, &ref, 0, /*verbosity*/3);
8379 }
8380 
8381 DEBUG_FUNCTION void
8382 debug_verbose (class loop *ptr)
8383 {
8384   if (ptr)
8385     debug_verbose (*ptr);
8386   else
8387     fprintf (stderr, "<nil>\n");
8388 }
8389 
8390 
8391 /* Debugging loops structure at tree level, at some VERBOSITY level.  */
8392 
8393 DEBUG_FUNCTION void
8394 debug_loops (int verbosity)
8395 {
8396   print_loops (stderr, verbosity);
8397 }
8398 
8399 /* Print on stderr the code of LOOP, at some VERBOSITY level.  */
8400 
8401 DEBUG_FUNCTION void
8402 debug_loop (class loop *loop, int verbosity)
8403 {
8404   print_loop (stderr, loop, 0, verbosity);
8405 }
8406 
8407 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8408    level.  */
8409 
8410 DEBUG_FUNCTION void
8411 debug_loop_num (unsigned num, int verbosity)
8412 {
8413   debug_loop (get_loop (cfun, num), verbosity);
8414 }
8415 
8416 /* Return true if BB ends with a call, possibly followed by some
8417    instructions that must stay with the call.  Return false
8418    otherwise.  */
8419 
8420 static bool
8421 gimple_block_ends_with_call_p (basic_block bb)
8422 {
8423   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8424   return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8425 }
8426 
8427 
8428 /* Return true if BB ends with a conditional branch.  Return false
8429    otherwise.  */
8430 
8431 static bool
8432 gimple_block_ends_with_condjump_p (const_basic_block bb)
8433 {
8434   gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8435   return (stmt && gimple_code (stmt) == GIMPLE_COND);
8436 }
8437 
8438 
8439 /* Return true if statement T may terminate execution of BB in ways not
8440    explicitly represented in the CFG.  */
8441 
8442 bool
8443 stmt_can_terminate_bb_p (gimple *t)
8444 {
8445   tree fndecl = NULL_TREE;
8446   int call_flags = 0;
8447 
8448   /* An EH exception not handled internally terminates execution of the whole
8449      function.  */
8450   if (stmt_can_throw_external (cfun, t))
8451     return true;
8452 
8453   /* NORETURN and LONGJMP calls already have an edge to exit.
8454      CONST and PURE calls do not need one.
8455      We don't currently check for CONST and PURE here, although
8456      it would be a good idea, because those attributes are
8457      figured out from the RTL in mark_constant_function, and
8458      the counter incrementation code from -fprofile-arcs
8459      leads to different results from -fbranch-probabilities.  */
8460   if (is_gimple_call (t))
8461     {
8462       fndecl = gimple_call_fndecl (t);
8463       call_flags = gimple_call_flags (t);
8464     }
8465 
8466   if (is_gimple_call (t)
8467       && fndecl
8468       && fndecl_built_in_p (fndecl)
8469       && (call_flags & ECF_NOTHROW)
8470       && !(call_flags & ECF_RETURNS_TWICE)
8471       /* fork() doesn't really return twice, but the effect of
8472          wrapping it in __gcov_fork() which calls __gcov_flush()
8473 	 and clears the counters before forking has the same
8474 	 effect as returning twice.  Force a fake edge.  */
8475       && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8476     return false;
8477 
8478   if (is_gimple_call (t))
8479     {
8480       edge_iterator ei;
8481       edge e;
8482       basic_block bb;
8483 
8484       if (call_flags & (ECF_PURE | ECF_CONST)
8485 	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8486 	return false;
8487 
8488       /* Function call may do longjmp, terminate program or do other things.
8489 	 Special case noreturn that have non-abnormal edges out as in this case
8490 	 the fact is sufficiently represented by lack of edges out of T.  */
8491       if (!(call_flags & ECF_NORETURN))
8492 	return true;
8493 
8494       bb = gimple_bb (t);
8495       FOR_EACH_EDGE (e, ei, bb->succs)
8496 	if ((e->flags & EDGE_FAKE) == 0)
8497 	  return true;
8498     }
8499 
8500   if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8501     if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8502       return true;
8503 
8504   return false;
8505 }
8506 
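/* For illustration (a sketch, not compiler code): given

     extern void may_throw ();	// not nothrow; may throw externally

     int f (int x)
     {
       may_throw ();	// stmt_can_terminate_bb_p returns true here
       return x + 1;	// may never be reached
     }

   the call can leave its basic block via an exception that has no
   explicit CFG edge, which is exactly what this predicate reports.  */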

/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
   volatile inline assembly, in the bitmap of blocks specified by BLOCKS,
   or in the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
                                     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple *t = NULL;

      if (!gsi_end_p (gsi))
        t = gsi_stmt (gsi);

      if (t && stmt_can_terminate_bb_p (t))
        {
          edge e;

          e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
          if (e)
            {
              gsi_insert_on_edge (e, gimple_build_nop ());
              gsi_commit_edge_inserts ();
            }
        }
    }

  /* Now add fake edges to the function exit for any non-constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple *stmt, *last_stmt;

      if (!bb)
        continue;

      if (blocks && !bitmap_bit_p (blocks, i))
        continue;

      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
        {
          last_stmt = gsi_stmt (gsi);
          do
            {
              stmt = gsi_stmt (gsi);
              if (stmt_can_terminate_bb_p (stmt))
                {
                  edge e;

                  /* The handling above of the final block before the
                     epilogue should be enough to verify that there is
                     no edge to the exit block in the CFG already.
                     Calling make_edge in such a case would cause us to
                     mark that edge as fake and remove it later.  */
                  if (flag_checking && stmt == last_stmt)
                    {
                      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
                      gcc_assert (e == NULL);
                    }

                  /* Note that the following may create a new basic block
                     and renumber the existing basic blocks.  */
                  if (stmt != last_stmt)
                    {
                      e = split_block (bb, stmt);
                      if (e)
                        blocks_split++;
                    }
                  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
                  e->probability = profile_probability::guessed_never ();
                }
              gsi_prev (&gsi);
            }
          while (!gsi_end_p (gsi));
        }
    }

  if (blocks_split)
    checking_verify_flow_info ();

  return blocks_split;
}

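/* A sketch of the transformation (illustrative only): a block

     bb_2:  x = foo ();	// foo may not return
            y = x + 1;

   is split after the call, and bb_2 then gets a fake edge to EXIT:

     bb_2:  x = foo ();	// EDGE_FAKE -> EXIT added here
     bb_3:  y = x + 1;

   so that profile instrumentation can account for paths on which foo
   never returns.  */
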
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Removing a path inside a non-root loop may change loop ownership
     of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
        bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
        {
          none_removed = true;
          break;
        }
    }

  auto_bitmap df, df_idom;
  if (none_removed)
    bitmap_set_bit (df_idom,
                    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
        {
          FOR_EACH_EDGE (f, ei, bb->succs)
            {
              if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                bitmap_set_bit (df, f->dest->index);
            }
        }
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
        bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
        {
          bb = BASIC_BLOCK_FOR_FN (cfun, i);
          bitmap_set_bit (df_idom,
                          get_immediate_dominator (CDI_DOMINATORS, bb)->index);
        }
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
        delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z be an arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
           dbb;
           dbb = next_dom_son (CDI_DOMINATORS, dbb))
        bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}

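/* For illustration (a sketch, not part of the compiler): in the diamond

        A
       / \
      B   C
       \ /
        D

   calling remove_edge_and_dominated_blocks on the edge A->B deletes B
   (the only block dominated by B), leaves D alone because D is still
   reachable via C, and then fixes dominators: idom(D) changes from A to
   C, which the DF_IDOM machinery above discovers from the dominance
   frontier of the removed blocks.  */
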
/* Purge dead EH edges from basic block BB.  */

bool
gimple_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = last_stmt (bb);

  if (stmt && stmt_can_throw_internal (cfun, stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
        {
          remove_edge_and_dominated_blocks (e);
          changed = true;
        }
      else
        ei_next (&ei);
    }

  return changed;
}

/* Purge dead EH edges from the basic blocks listed in BLOCKS.  */

bool
gimple_purge_all_dead_eh_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* An earlier gimple_purge_dead_eh_edges could have removed
         this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
        changed |= gimple_purge_dead_eh_edges (bb);
    }

  return changed;
}

/* Purge dead abnormal call edges from basic block BB.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = last_stmt (bb);

  if (!cfun->has_nonlocal_label
      && !cfun->calls_setjmp)
    return false;

  if (stmt && stmt_can_make_abnormal_goto (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
        {
          if (e->flags & EDGE_FALLTHRU)
            e->flags &= ~EDGE_ABNORMAL;
          else
            remove_edge_and_dominated_blocks (e);
          changed = true;
        }
      else
        ei_next (&ei);
    }

  return changed;
}

/* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */

bool
gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* An earlier gimple_purge_dead_abnormal_call_edges could have removed
         this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
        changed |= gimple_purge_dead_abnormal_call_edges (bb);
    }

  return changed;
}

/* This function is called whenever a new edge is created or
   redirected.  */

static void
gimple_execute_on_growing_pred (edge e)
{
  basic_block bb = e->dest;

  if (!gimple_seq_empty_p (phi_nodes (bb)))
    reserve_phi_args_for_new_edge (bb);
}

/* This function is called immediately before edge E is removed from
   the edge vector E->dest->preds.  */

static void
gimple_execute_on_shrinking_pred (edge e)
{
  if (!gimple_seq_empty_p (phi_nodes (e->dest)))
    remove_phi_args (e);
}

/*---------------------------------------------------------------------------
  Helper functions for loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust the phi nodes of basic block FIRST.  Basic block SECOND is a copy
   of FIRST, and both are dominated by basic block NEW_HEAD.  When NEW_HEAD
   was created by splitting SECOND's incoming edge, the edge from NEW_HEAD
   to SECOND received phi arguments from split_edge ().  Later, an
   additional edge E was created to connect NEW_HEAD and FIRST.  This
   routine adds to edge E the same phi args that the NEW_HEAD-to-SECOND
   edge received as part of the edge splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
                                  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all the phi nodes of basic block SECOND and add phi args to
     edge E for the FIRST head.  PHI args are always in correct order.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2),  gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}

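/* For illustration (a sketch of the shape after versioning):

          NEW_HEAD
          /      \
         E        E2 (from split_edge, already has phi args)
        /          \
     FIRST        SECOND
   (one version)  (copied version)

   E2 already carries phi arguments; this routine copies them onto E so
   the phis in FIRST see the same values when entered from NEW_HEAD.  */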

/* Adds an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
   the destination of the ELSE part.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
                               basic_block second_head ATTRIBUTE_UNUSED,
                               basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple *new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build the new conditional expr.  */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
                                               NULL_TREE, NULL_TREE);

  /* Add the new cond in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect the new head with the first
     head as well as the second head.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}


/* Do book-keeping of basic block BB for the profile consistency checker.
   Store the counts in RECORD.  */
static void
gimple_account_profile_record (basic_block bb,
                               struct profile_record *record)
{
  gimple_stmt_iterator i;
  for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
    {
      record->size
        += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      if (bb->count.initialized_p ())
        record->time
          += estimate_num_insns (gsi_stmt (i),
                                 &eni_time_weights) * bb->count.to_gcov_type ();
      else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
        record->time
          += estimate_num_insns (gsi_stmt (i),
                                 &eni_time_weights) * bb->count.to_frequency (cfun);
    }
}

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forwarder_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru  */
  gimple_block_ends_with_call_p, /* block_ends_with_call_p  */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add,	/* flow_call_edges_add  */
  gimple_execute_on_growing_pred, /* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred  */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts,		/* flush_pending_stmts  */
  gimple_empty_block_p,		/* block_empty_p  */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump  */
  gimple_account_profile_record,
};


/* Split all critical edges.  Split some extra (not necessarily critical)
   edges if FOR_EDGE_INSERTION_P is true.  */

unsigned int
split_critical_edges (bool for_edge_insertion_p /* = false */)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
            split_edge (e);
          /* PRE inserts statements to edges and expects that
             since split_critical_edges was done beforehand, committing edge
             insertions will not split more edges.  In addition to critical
             edges we must split edges coming out of blocks that end in
             control flow statements, such as RESX.
             Go ahead and split them too.  This matches the logic in
             gimple_find_edge_insert_loc.  */
          else if (for_edge_insertion_p
                   && (!single_pred_p (e->dest)
                       || !gimple_seq_empty_p (phi_nodes (e->dest))
                       || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
                   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                   && !(e->flags & EDGE_ABNORMAL))
            {
              gimple_stmt_iterator gsi;

              gsi = gsi_last_bb (e->src);
              if (!gsi_end_p (gsi)
                  && stmt_ends_bb_p (gsi_stmt (gsi))
                  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
                      && !gimple_call_builtin_p (gsi_stmt (gsi),
                                                 BUILT_IN_RETURN)))
                split_edge (e);
            }
        }
    }
  end_recording_case_labels ();
  return 0;
}

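/* For illustration: an edge is critical when its source has several
   successors and its destination has several predecessors, e.g. the
   edge A->C in

     A -> D
     A -> C
     B -> C

   Statements cannot be inserted on A->C without affecting the other
   paths through A or C, so split_edge turns it into A->A'->C with a
   fresh empty block A'.  */
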
namespace {

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}


/* Insert COND expression, which must be a GIMPLE_COND, after STMT
   in basic block BB, splitting the block as appropriate and creating
   a new conditionally executed basic block.
   Update the profile so the new bb is visited with probability PROB.
   Return the created basic block.  */
basic_block
insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
                profile_probability prob)
{
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert the cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create the conditionally executed block.  */
  new_bb = create_empty_bb (bb);
  edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  e->probability = prob;
  new_bb->count = e->count ();
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix the edge for the split bb.  */
  fall->flags = EDGE_FALSE_VALUE;
  fall->probability -= e->probability;

  /* Update dominance info.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}

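/* A sketch of the resulting shape (illustrative): starting from

     BB:  ...; STMT; rest...

   insert_cond_bb (bb, stmt, cond, prob) produces

     BB:      ...; STMT; if (COND)	// true edge, probability PROB
     NEW_BB:  (empty, conditionally executed)
     REST:    rest...

   with NEW_BB falling through to REST, and BB taking the false edge
   straight to REST.  */
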
/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
                 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

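/* Typical usage from a pass (a sketch; the variables are hypothetical):

     // Compute tmp = a * b + c before the statement at GSI.  Each helper
     // folds the tree first, then gimplifies the result into separate
     // statements as needed and returns a gimple value.
     tree t1 = gimplify_build2 (&gsi, MULT_EXPR, type, a, b);
     tree tmp = gimplify_build2 (&gsi, PLUS_EXPR, type, t1, c);  */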


/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
                                     edge *true_edge,
                                     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}

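/* Typical usage (a sketch): for a block ending in a GIMPLE_COND,

     edge true_e, false_e;
     extract_true_false_edges_from_block (bb, &true_e, &false_e);
     // true_e->dest is the THEN destination, false_e->dest the ELSE one.

   No searching is needed because such a block has exactly two successor
   edges, one marked EDGE_TRUE_VALUE and one EDGE_FALSE_VALUE.  */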

/* From a controlling predicate in the immediate dominator DOM of
   PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
   predicate evaluates to true and false, and store them to
   *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
   they are non-NULL.  Returns true if the edges can be determined,
   else returns false.  */

bool
extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
                                     edge *true_controlled_edge,
                                     edge *false_controlled_edge)
{
  basic_block bb = phiblock;
  edge true_edge, false_edge, tem;
  edge e0 = NULL, e1 = NULL;

  /* We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  */
  extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
  tem = EDGE_PRED (bb, 0);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
          && (tem->src == true_edge->dest
              || dominated_by_p (CDI_DOMINATORS,
                                 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
           || (single_pred_p (false_edge->dest)
               && (tem->src == false_edge->dest
                   || dominated_by_p (CDI_DOMINATORS,
                                      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  tem = EDGE_PRED (bb, 1);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
          && (tem->src == true_edge->dest
              || dominated_by_p (CDI_DOMINATORS,
                                 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
           || (single_pred_p (false_edge->dest)
               && (tem->src == false_edge->dest
                   || dominated_by_p (CDI_DOMINATORS,
                                      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  if (!e0 || !e1)
    return false;

  if (true_controlled_edge)
    *true_controlled_edge = e0;
  if (false_controlled_edge)
    *false_controlled_edge = e1;

  return true;
}

/* Compute *LHS and *RHS such that the unsigned comparison *LHS <= *RHS
   tests whether INDEX is in the range [LOW, HIGH].  Place the associated
   stmts at the end of BB, before its last stmt.  */

void
generate_range_test (basic_block bb, tree index, tree low, tree high,
                     tree *lhs, tree *rhs)
{
  tree type = TREE_TYPE (index);
  tree utype = range_check_type (type);

  low = fold_convert (utype, low);
  high = fold_convert (utype, high);

  gimple_seq seq = NULL;
  index = gimple_convert (&seq, utype, index);
  *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
  *rhs = const_binop (MINUS_EXPR, utype, high, low);

  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
}

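/* The trick used above is the standard unsigned range-check identity
   (illustrative):

     LOW <= INDEX && INDEX <= HIGH
       <=>  (utype) (INDEX - LOW) <= (utype) (HIGH - LOW)

   e.g. for LOW = 3, HIGH = 7, INDEX = 2: 2 - 3 wraps around to a huge
   unsigned value, which is > 4, so the single unsigned comparison
   correctly rejects values below LOW as well as above HIGH.  */
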
/* Return the basic block that belongs to label numbered INDEX
   of a switch statement.  */

basic_block
gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
{
  return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
}

/* Return the default basic block of a switch statement.  */

basic_block
gimple_switch_default_bb (function *ifun, gswitch *gs)
{
  return gimple_switch_label_bb (ifun, gs, 0);
}

/* Return the edge that belongs to label numbered INDEX
   of a switch statement.  */

edge
gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
{
  return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
}

/* Return the default edge of a switch statement.  */

edge
gimple_switch_default_edge (function *ifun, gswitch *gs)
{
  return gimple_switch_edge (ifun, gs, 0);
}

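/* For illustration: in a GIMPLE switch such as

     switch (x) <default: L0, case 1: L1, case 2 ... 5: L2>

   label index 0 is always the default label, which is why the
   *_default_* helpers above simply pass index 0.  A usage sketch:

     basic_block def_bb = gimple_switch_default_bb (cfun, swtch);
     edge e1 = gimple_switch_edge (cfun, swtch, 1);  // case 1  */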


/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

unsigned int
pass_warn_function_return::execute (function *fun)
{
  location_t location;
  gimple *last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
           (e = ei_safe_edge (ei)); )
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
               || gimple_call_builtin_p (last, BUILT_IN_RETURN))
              && location == UNKNOWN_LOCATION
              && ((location = LOCATION_LOCUS (gimple_location (last)))
                  != UNKNOWN_LOCATION)
              && !optimize)
            break;
          /* When optimizing, replace return stmts in noreturn functions
             with a __builtin_unreachable () call.  */
          if (optimize && gimple_code (last) == GIMPLE_RETURN)
            {
              tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              gimple *new_stmt = gimple_build_call (fndecl, 0);
              gimple_set_location (new_stmt, gimple_location (last));
              gimple_stmt_iterator gsi = gsi_for_stmt (last);
              gsi_replace (&gsi, new_stmt, true);
              remove_edge (e);
            }
          else
            ei_next (&ei);
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type > 0
           && !TREE_NO_WARNING (fun->decl)
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
        {
          gimple *last = last_stmt (e->src);
          greturn *return_stmt = dyn_cast <greturn *> (last);
          if (return_stmt
              && gimple_return_retval (return_stmt) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
                location = fun->function_end_locus;
              if (warning_at (location, OPT_Wreturn_type,
                              "control reaches end of non-void function"))
                TREE_NO_WARNING (fun->decl) = 1;
              break;
            }
        }
      /* The C++ FE turns fallthrough from the end of a non-void function
         into a __builtin_unreachable () call with BUILTINS_LOCATION.
         Recognize those too.  */
      basic_block bb;
      if (!TREE_NO_WARNING (fun->decl))
        FOR_EACH_BB_FN (bb, fun)
          if (EDGE_COUNT (bb->succs) == 0)
            {
              gimple *last = last_stmt (bb);
              const enum built_in_function ubsan_missing_ret
                = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
              if (last
                  && ((LOCATION_LOCUS (gimple_location (last))
                       == BUILTINS_LOCATION
                       && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
                      || gimple_call_builtin_p (last, ubsan_missing_ret)))
                {
                  gimple_stmt_iterator gsi = gsi_for_stmt (last);
                  gsi_prev_nondebug (&gsi);
                  gimple *prev = gsi_stmt (gsi);
                  if (prev == NULL)
                    location = UNKNOWN_LOCATION;
                  else
                    location = gimple_location (prev);
                  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
                    location = fun->function_end_locus;
                  if (warning_at (location, OPT_Wreturn_type,
                                  "control reaches end of non-void function"))
                    TREE_NO_WARNING (fun->decl) = 1;
                  break;
                }
            }
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}

/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (
                                   as_a <gcatch *> (g)));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD "
                            "declared with attribute %<warn_unused_result%>",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute %<warn_unused_result%>");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}

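/* For illustration, source that triggers the warning above (a sketch):

     __attribute__ ((warn_unused_result)) int must_check (void);

     void g (void)
     {
       must_check ();		// warning: ignoring return value ...
       (void) must_check ();	// still a GIMPLE_CALL without an LHS,
				// so it warns as well; a cast to void
				// does not silence this warning.
     }  */
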
namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because between inlining and this fixup verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  /* The same scaling is also done by ipa_merge_profiles.  */
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  bool scale = num.initialized_p () && !(num == den);

  if (scale)
    {
      profile_count::adjust_for_ipa_scaling (&num, &den);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
        = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
        bb->count = bb->count.apply_scale (num, den);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple *stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_purge_dead_abnormal_call_edges (bb))
                    todo |= TODO_cleanup_cfg;

                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      update_stmt (stmt);
                    }
                }

              if (flags & ECF_NORETURN
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

          /* Remove stores to variables we marked write-only.
             Keep the access when the store has a side effect, i.e. when
             the source is volatile.  */
          if (gimple_store_p (stmt)
              && !gimple_has_side_effects (stmt)
              && !optimize_debug)
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (VAR_P (lhs)
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  unlink_stmt_vdef (stmt);
                  gsi_remove (&gsi, true);
                  release_defs (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                  continue;
                }
            }
          /* For calls we can simply remove the LHS when it is known
             to be write-only.  */
          if (is_gimple_call (stmt)
              && gimple_get_lhs (stmt))
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (VAR_P (lhs)
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  gimple_call_set_lhs (stmt, NULL);
                  update_stmt (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                }
            }

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
          gsi_next (&gsi);
        }

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call, end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple *stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || !gimple_call_noreturn_p (stmt))))
            {
              if (stmt && is_gimple_call (stmt))
                gimple_call_set_ctrl_altering (stmt, false);
              tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              stmt = gimple_build_call (fndecl, 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
              if (!cfun->after_inlining)
                {
                  gcall *call_stmt = dyn_cast <gcall *> (stmt);
                  node->create_edge (cgraph_node::get_create (fndecl),
                                     call_stmt, bb->count);
                }
            }
        }
    }
  if (scale)
    {
      update_max_bb_count ();
      compute_function_frequency ();
    }

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}

#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
                            NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - make_edges.  */

/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.  Each BB in our simple chain
     should be dominated by the one before it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}

/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
      EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where each of the blocks A, B, C, D below
   has an edge to every other one, including itself).
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_POST_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_POST_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc  */

#endif /* CHECKING_P */