xref: /openbsd/gnu/gcc/gcc/tree-cfg.c (revision 404b540a)
1 /* Control flow functions for trees.
2    Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006
3    Free Software Foundation, Inc.
4    Contributed by Diego Novillo <dnovillo@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12 
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
48 #include "tree-ssa-propagate.h"
49 
50 /* This file contains functions for building the Control Flow Graph (CFG)
51    for a function tree.  */
52 
53 /* Local declarations.  */
54 
55 /* Initial capacity for the basic block array.  */
56 static const int initial_cfg_capacity = 20;
57 
58 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
59    which use a particular edge.  The CASE_LABEL_EXPRs are chained together
60    via their TREE_CHAIN field, which we clear after we're done with the
61    hash table to prevent problems with duplication of SWITCH_EXPRs.
62 
63    Access to this list of CASE_LABEL_EXPRs allows us to efficiently
64    update the case vector in response to edge redirections.
65 
66    Right now this table is set up and torn down at key points in the
67    compilation process.  It would be nice if we could make the table
68    more persistent.  The key is getting notification of changes to
69    the CFG (particularly edge removal, creation and redirection).  */
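
/* A sketch of the mapping this table maintains (illustrative only):

     edge E (from the SWITCH_EXPR block to a case-label block)
       ==> CASE_LABEL_EXPR_1 -> CASE_LABEL_EXPR_2 -> ...  (via TREE_CHAIN)

   so updating the case vector after redirecting E only requires walking
   this chain, rather than rescanning every label in the SWITCH_EXPR.  */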
70 
71 struct edge_to_cases_elt
72 {
73   /* The edge itself.  Necessary for hashing and equality tests.  */
74   edge e;
75 
76   /* The case labels associated with this edge.  We link these up via
77      their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
78      when we destroy the hash table.  This prevents problems when copying
79      SWITCH_EXPRs.  */
80   tree case_labels;
81 };
82 
83 static htab_t edge_to_cases;
84 
85 /* CFG statistics.  */
86 struct cfg_stats_d
87 {
88   long num_merged_labels;
89 };
90 
91 static struct cfg_stats_d cfg_stats;
92 
93 /* Nonzero if we found a computed goto while building basic blocks.  */
94 static bool found_computed_goto;
95 
96 /* Basic blocks and flowgraphs.  */
97 static basic_block create_bb (void *, void *, basic_block);
98 static void make_blocks (tree);
99 static void factor_computed_gotos (void);
100 
101 /* Edges.  */
102 static void make_edges (void);
103 static void make_cond_expr_edges (basic_block);
104 static void make_switch_expr_edges (basic_block);
105 static void make_goto_expr_edges (basic_block);
106 static edge tree_redirect_edge_and_branch (edge, basic_block);
107 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
108 static unsigned int split_critical_edges (void);
109 
110 /* Various helpers.  */
111 static inline bool stmt_starts_bb_p (tree, tree);
112 static int tree_verify_flow_info (void);
113 static void tree_make_forwarder_block (edge);
114 static void tree_cfg2vcg (FILE *);
115 static inline void change_bb_for_stmt (tree t, basic_block bb);
116 
117 /* Flowgraph optimization and cleanup.  */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
125 
126 void
127 init_empty_tree_cfg (void)
128 {
129   /* Initialize the basic block array.  */
130   init_flow ();
131   profile_status = PROFILE_ABSENT;
132   n_basic_blocks = NUM_FIXED_BLOCKS;
133   last_basic_block = NUM_FIXED_BLOCKS;
134   basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
135   VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
136   memset (VEC_address (basic_block, basic_block_info), 0,
137 	  sizeof (basic_block) * initial_cfg_capacity);
138 
139   /* Build a mapping of labels to their associated blocks.  */
140   label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
141   VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
142   memset (VEC_address (basic_block, label_to_block_map),
143 	  0, sizeof (basic_block) * initial_cfg_capacity);
144 
145   SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
146   SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
147   ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
148   EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
149 }
150 
151 /*---------------------------------------------------------------------------
152 			      Create basic blocks
153 ---------------------------------------------------------------------------*/
154 
155 /* Entry point to the CFG builder for trees.  TP points to the list of
156    statements to be added to the flowgraph.  */
157 
158 static void
159 build_tree_cfg (tree *tp)
160 {
161   /* Register specific tree functions.  */
162   tree_register_cfg_hooks ();
163 
164   memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
165 
166   init_empty_tree_cfg ();
167 
168   found_computed_goto = 0;
169   make_blocks (*tp);
170 
171   /* Computed gotos are hell to deal with, especially if there are
172      lots of them with a large number of destinations.  So we factor
173      them to a common computed goto location before we build the
174      edge list.  After we convert back to normal form, we will un-factor
175      the computed gotos since factoring introduces an unwanted jump.  */
176   if (found_computed_goto)
177     factor_computed_gotos ();
178 
179   /* Make sure there is always at least one block, even if it's empty.  */
180   if (n_basic_blocks == NUM_FIXED_BLOCKS)
181     create_empty_bb (ENTRY_BLOCK_PTR);
182 
183   /* Adjust the size of the array.  */
184   if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
185     {
186       size_t old_size = VEC_length (basic_block, basic_block_info);
187       basic_block *p;
188       VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
189       p = VEC_address (basic_block, basic_block_info);
190       memset (&p[old_size], 0,
191 	      sizeof (basic_block) * (n_basic_blocks - old_size));
192     }
193 
194   /* To speed up statement iterator walks, we first purge dead labels.  */
195   cleanup_dead_labels ();
196 
197   /* Group case nodes to reduce the number of edges.
198      We do this after cleaning up dead labels because otherwise we miss
199      a lot of obvious case merging opportunities.  */
200   group_case_labels ();
201 
202   /* Create the edges of the flowgraph.  */
203   make_edges ();
204 
205   /* Debugging dumps.  */
206 
207   /* Write the flowgraph to a VCG file.  */
208   {
209     int local_dump_flags;
210     FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
211     if (vcg_file)
212       {
213 	tree_cfg2vcg (vcg_file);
214 	dump_end (TDI_vcg, vcg_file);
215       }
216   }
217 
218 #ifdef ENABLE_CHECKING
219   verify_stmts ();
220 #endif
221 
222   /* Dump a textual representation of the flowgraph.  */
223   if (dump_file)
224     dump_tree_cfg (dump_file, dump_flags);
225 }
226 
227 static unsigned int
228 execute_build_cfg (void)
229 {
230   build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
231   return 0;
232 }
233 
234 struct tree_opt_pass pass_build_cfg =
235 {
236   "cfg",				/* name */
237   NULL,					/* gate */
238   execute_build_cfg,			/* execute */
239   NULL,					/* sub */
240   NULL,					/* next */
241   0,					/* static_pass_number */
242   TV_TREE_CFG,				/* tv_id */
243   PROP_gimple_leh,			/* properties_required */
244   PROP_cfg,				/* properties_provided */
245   0,					/* properties_destroyed */
246   0,					/* todo_flags_start */
247   TODO_verify_stmts,			/* todo_flags_finish */
248   0					/* letter */
249 };
250 
251 /* Search the CFG for any computed gotos.  If found, factor them to a
252    common computed goto site.  Also record the location of that site so
253    that we can un-factor the gotos after we have converted back to
254    normal form.  */
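
/* An illustrative sketch of the transformation (not an actual GIMPLE dump):

     before factoring:              after factoring:
       goto *p;                       gotovar = p;
       ...                            goto <factored_label>;
       goto *q;                       ...
                                      gotovar = q;
                                      goto <factored_label>;
                                      ...
                                    <factored_label>:
                                      goto *gotovar;

   Each original computed goto becomes an assignment to the temporary
   "gotovar" plus a simple goto to the single factored block, which performs
   the one remaining computed goto.  */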
255 
256 static void
257 factor_computed_gotos (void)
258 {
259   basic_block bb;
260   tree factored_label_decl = NULL;
261   tree var = NULL;
262   tree factored_computed_goto_label = NULL;
263   tree factored_computed_goto = NULL;
264 
265   /* We know there are one or more computed gotos in this function.
266      Examine the last statement in each basic block to see if the block
267      ends with a computed goto.  */
268 
269   FOR_EACH_BB (bb)
270     {
271       block_stmt_iterator bsi = bsi_last (bb);
272       tree last;
273 
274       if (bsi_end_p (bsi))
275 	continue;
276       last = bsi_stmt (bsi);
277 
278       /* Ignore the computed goto we create when we factor the original
279 	 computed gotos.  */
280       if (last == factored_computed_goto)
281 	continue;
282 
283       /* If the last statement is a computed goto, factor it.  */
284       if (computed_goto_p (last))
285 	{
286 	  tree assignment;
287 
288 	  /* The first time we find a computed goto we need to create
289 	     the factored goto block and the variable each original
290 	     computed goto will use for their goto destination.  */
291 	  if (! factored_computed_goto)
292 	    {
293 	      basic_block new_bb = create_empty_bb (bb);
294 	      block_stmt_iterator new_bsi = bsi_start (new_bb);
295 
296 	      /* Create the destination of the factored goto.  Each original
297 		 computed goto will put its desired destination into this
298 		 variable and jump to the label we create immediately
299 		 below.  */
300 	      var = create_tmp_var (ptr_type_node, "gotovar");
301 
302 	      /* Build a label for the new block which will contain the
303 		 factored computed goto.  */
304 	      factored_label_decl = create_artificial_label ();
305 	      factored_computed_goto_label
306 		= build1 (LABEL_EXPR, void_type_node, factored_label_decl);
307 	      bsi_insert_after (&new_bsi, factored_computed_goto_label,
308 				BSI_NEW_STMT);
309 
310 	      /* Build our new computed goto.  */
311 	      factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
312 	      bsi_insert_after (&new_bsi, factored_computed_goto,
313 				BSI_NEW_STMT);
314 	    }
315 
316 	  /* Copy the original computed goto's destination into VAR.  */
317 	  assignment = build2 (MODIFY_EXPR, ptr_type_node,
318 			       var, GOTO_DESTINATION (last));
319 	  bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
320 
321 	  /* And re-vector the computed goto to the new destination.  */
322 	  GOTO_DESTINATION (last) = factored_label_decl;
323 	}
324     }
325 }
326 
327 
328 /* Build a flowgraph for the statement_list STMT_LIST.  */
329 
330 static void
331 make_blocks (tree stmt_list)
332 {
333   tree_stmt_iterator i = tsi_start (stmt_list);
334   tree stmt = NULL;
335   bool start_new_block = true;
336   bool first_stmt_of_list = true;
337   basic_block bb = ENTRY_BLOCK_PTR;
338 
339   while (!tsi_end_p (i))
340     {
341       tree prev_stmt;
342 
343       prev_stmt = stmt;
344       stmt = tsi_stmt (i);
345 
346       /* If the statement starts a new basic block or if we have determined
347 	 in a previous pass that we need to create a new block for STMT, do
348 	 so now.  */
349       if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
350 	{
351 	  if (!first_stmt_of_list)
352 	    stmt_list = tsi_split_statement_list_before (&i);
353 	  bb = create_basic_block (stmt_list, NULL, bb);
354 	  start_new_block = false;
355 	}
356 
357       /* Now add STMT to BB and create the subgraphs for special statement
358 	 codes.  */
359       set_bb_for_stmt (stmt, bb);
360 
361       if (computed_goto_p (stmt))
362 	found_computed_goto = true;
363 
364       /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
365 	 next iteration.  */
366       if (stmt_ends_bb_p (stmt))
367 	start_new_block = true;
368 
369       tsi_next (&i);
370       first_stmt_of_list = false;
371     }
372 }
373 
374 
375 /* Create and return a new empty basic block after bb AFTER.  */
376 
377 static basic_block
378 create_bb (void *h, void *e, basic_block after)
379 {
380   basic_block bb;
381 
382   gcc_assert (!e);
383 
384   /* Create and initialize a new basic block.  Since alloc_block uses
385      ggc_alloc_cleared to allocate a basic block, we do not have to
386      clear the newly allocated basic block here.  */
387   bb = alloc_block ();
388 
389   bb->index = last_basic_block;
390   bb->flags = BB_NEW;
391   bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
392 
393   /* Add the new block to the linked list of blocks.  */
394   link_block (bb, after);
395 
396   /* Grow the basic block array if needed.  */
397   if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
398     {
399       size_t old_size = VEC_length (basic_block, basic_block_info);
400       size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
401       basic_block *p;
402       VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
403       p = VEC_address (basic_block, basic_block_info);
404       memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
405     }
406 
407   /* Add the newly created block to the array.  */
408   SET_BASIC_BLOCK (last_basic_block, bb);
409 
410   n_basic_blocks++;
411   last_basic_block++;
412 
413   return bb;
414 }
415 
416 
417 /*---------------------------------------------------------------------------
418 				 Edge creation
419 ---------------------------------------------------------------------------*/
420 
421 /* Fold COND_EXPR_COND of each COND_EXPR.  */
422 
423 void
424 fold_cond_expr_cond (void)
425 {
426   basic_block bb;
427 
428   FOR_EACH_BB (bb)
429     {
430       tree stmt = last_stmt (bb);
431 
432       if (stmt
433 	  && TREE_CODE (stmt) == COND_EXPR)
434 	{
435 	  tree cond;
436 	  bool zerop, onep;
437 
438 	  fold_defer_overflow_warnings ();
439 	  cond = fold (COND_EXPR_COND (stmt));
440 	  zerop = integer_zerop (cond);
441 	  onep = integer_onep (cond);
442 	  fold_undefer_overflow_warnings (((zerop || onep)
443 					   && !TREE_NO_WARNING (stmt)),
444 					  stmt,
445 					  WARN_STRICT_OVERFLOW_CONDITIONAL);
446 	  if (zerop)
447 	    COND_EXPR_COND (stmt) = boolean_false_node;
448 	  else if (onep)
449 	    COND_EXPR_COND (stmt) = boolean_true_node;
450 	}
451     }
452 }
453 
454 /* Join all the blocks in the flowgraph.  */
455 
456 static void
457 make_edges (void)
458 {
459   basic_block bb;
460   struct omp_region *cur_region = NULL;
461 
462   /* Create an edge from entry to the first block with executable
463      statements in it.  */
464   make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
465 
466   /* Traverse the basic block array placing edges.  */
467   FOR_EACH_BB (bb)
468     {
469       tree last = last_stmt (bb);
470       bool fallthru;
471 
472       if (last)
473 	{
474 	  enum tree_code code = TREE_CODE (last);
475 	  switch (code)
476 	    {
477 	    case GOTO_EXPR:
478 	      make_goto_expr_edges (bb);
479 	      fallthru = false;
480 	      break;
481 	    case RETURN_EXPR:
482 	      make_edge (bb, EXIT_BLOCK_PTR, 0);
483 	      fallthru = false;
484 	      break;
485 	    case COND_EXPR:
486 	      make_cond_expr_edges (bb);
487 	      fallthru = false;
488 	      break;
489 	    case SWITCH_EXPR:
490 	      make_switch_expr_edges (bb);
491 	      fallthru = false;
492 	      break;
493 	    case RESX_EXPR:
494 	      make_eh_edges (last);
495 	      fallthru = false;
496 	      break;
497 
498 	    case CALL_EXPR:
499 	      /* If this function receives a nonlocal goto, then we need to
500 		 make edges from this call site to all the nonlocal goto
501 		 handlers.  */
502 	      if (tree_can_make_abnormal_goto (last))
503 		make_abnormal_goto_edges (bb, true);
504 
505 	      /* If this statement has reachable exception handlers, then
506 		 create abnormal edges to them.  */
507 	      make_eh_edges (last);
508 
509 	      /* Some calls are known not to return.  */
510 	      fallthru = !(call_expr_flags (last) & ECF_NORETURN);
511 	      break;
512 
513 	    case MODIFY_EXPR:
514 	      if (is_ctrl_altering_stmt (last))
515 		{
516 		  /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the
517 		     CALL_EXPR may have an abnormal edge.  Search the RHS for
518 		     this case and create any required edges.  */
519 		  if (tree_can_make_abnormal_goto (last))
520 		    make_abnormal_goto_edges (bb, true);
521 
522 		  make_eh_edges (last);
523 		}
524 	      fallthru = true;
525 	      break;
526 
527 	    case OMP_PARALLEL:
528 	    case OMP_FOR:
529 	    case OMP_SINGLE:
530 	    case OMP_MASTER:
531 	    case OMP_ORDERED:
532 	    case OMP_CRITICAL:
533 	    case OMP_SECTION:
534 	      cur_region = new_omp_region (bb, code, cur_region);
535 	      fallthru = true;
536 	      break;
537 
538 	    case OMP_SECTIONS:
539 	      cur_region = new_omp_region (bb, code, cur_region);
540 	      fallthru = false;
541 	      break;
542 
543 	    case OMP_RETURN:
544 	      /* In the case of an OMP_SECTION, the edge will go somewhere
545 		 other than the next block.  This will be created later.  */
546 	      cur_region->exit = bb;
547 	      fallthru = cur_region->type != OMP_SECTION;
548 	      cur_region = cur_region->outer;
549 	      break;
550 
551 	    case OMP_CONTINUE:
552 	      cur_region->cont = bb;
553 	      switch (cur_region->type)
554 		{
555 		case OMP_FOR:
556 		  /* ??? Technically there should be a some sort of loopback
557 		     edge here, but it goes to a block that doesn't exist yet,
558 		     and without it, updating the ssa form would be a real
559 		     bear.  Fortunately, we don't yet do ssa before expanding
560 		     these nodes.  */
561 		  break;
562 
563 		case OMP_SECTIONS:
564 		  /* Wire up the edges into and out of the nested sections.  */
565 		  /* ??? Similarly wrt loopback.  */
566 		  {
567 		    struct omp_region *i;
568 		    for (i = cur_region->inner; i ; i = i->next)
569 		      {
570 			gcc_assert (i->type == OMP_SECTION);
571 			make_edge (cur_region->entry, i->entry, 0);
572 			make_edge (i->exit, bb, EDGE_FALLTHRU);
573 		      }
574 		  }
575 		  break;
576 
577 		default:
578 		  gcc_unreachable ();
579 		}
580 	      fallthru = true;
581 	      break;
582 
583 	    default:
584 	      gcc_assert (!stmt_ends_bb_p (last));
585 	      fallthru = true;
586 	    }
587 	}
588       else
589 	fallthru = true;
590 
591       if (fallthru)
592 	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
593     }
594 
595   if (root_omp_region)
596     free_omp_regions ();
597 
598   /* Fold COND_EXPR_COND of each COND_EXPR.  */
599   fold_cond_expr_cond ();
600 
601   /* Clean up the graph and warn for unreachable code.  */
602   cleanup_tree_cfg ();
603 }
604 
605 
606 /* Create the edges for a COND_EXPR starting at block BB.
607    At this point, both clauses must contain only simple gotos.  */
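
/* For example, after lowering, such a COND_EXPR has the form (a sketch, not
   an actual dump):

     if (a_1 > b_2) goto <then_label>; else goto <else_label>;

   Both arms are simple GOTO_EXPRs, so their destinations give the two
   successor blocks directly.  */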
608 
609 static void
610 make_cond_expr_edges (basic_block bb)
611 {
612   tree entry = last_stmt (bb);
613   basic_block then_bb, else_bb;
614   tree then_label, else_label;
615   edge e;
616 
617   gcc_assert (entry);
618   gcc_assert (TREE_CODE (entry) == COND_EXPR);
619 
620   /* Entry basic blocks for each component.  */
621   then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
622   else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
623   then_bb = label_to_block (then_label);
624   else_bb = label_to_block (else_label);
625 
626   e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
627 #ifdef USE_MAPPED_LOCATION
628   e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
629 #else
630   e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
631 #endif
632   e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
633   if (e)
634     {
635 #ifdef USE_MAPPED_LOCATION
636       e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
637 #else
638       e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
639 #endif
640     }
641 }
642 
643 /* Hashing routine for EDGE_TO_CASES.  */
644 
645 static hashval_t
646 edge_to_cases_hash (const void *p)
647 {
648   edge e = ((struct edge_to_cases_elt *)p)->e;
649 
650   /* Hash on the edge itself (which is a pointer).  */
651   return htab_hash_pointer (e);
652 }
653 
654 /* Equality routine for EDGE_TO_CASES.  Edges are unique, so testing
655    for equality is just a pointer comparison.  */
656 
657 static int
658 edge_to_cases_eq (const void *p1, const void *p2)
659 {
660   edge e1 = ((struct edge_to_cases_elt *)p1)->e;
661   edge e2 = ((struct edge_to_cases_elt *)p2)->e;
662 
663   return e1 == e2;
664 }
665 
666 /* Called for each element in the hash table (P) as we delete the
667    edge to cases hash table.
668 
669    Clear all the TREE_CHAINs to prevent problems with copying of
670    SWITCH_EXPRs and structure sharing rules, then free the hash table
671    element.  */
672 
673 static void
674 edge_to_cases_cleanup (void *p)
675 {
676   struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
677   tree t, next;
678 
679   for (t = elt->case_labels; t; t = next)
680     {
681       next = TREE_CHAIN (t);
682       TREE_CHAIN (t) = NULL;
683     }
684   free (p);
685 }
686 
687 /* Start recording information mapping edges to case labels.  */
688 
689 void
690 start_recording_case_labels (void)
691 {
692   gcc_assert (edge_to_cases == NULL);
693 
694   edge_to_cases = htab_create (37,
695 			       edge_to_cases_hash,
696 			       edge_to_cases_eq,
697 			       edge_to_cases_cleanup);
698 }
699 
700 /* Return nonzero if we are recording information for case labels.  */
701 
702 static bool
703 recording_case_labels_p (void)
704 {
705   return (edge_to_cases != NULL);
706 }
707 
708 /* Stop recording information mapping edges to case labels and
709    remove any information we have recorded.  */
710 void
711 end_recording_case_labels (void)
712 {
713   htab_delete (edge_to_cases);
714   edge_to_cases = NULL;
715 }
716 
717 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E.  */
718 
719 static void
720 record_switch_edge (edge e, tree case_label)
721 {
722   struct edge_to_cases_elt *elt;
723   void **slot;
724 
725   /* Build a hash table element so we can see if E is already
726      in the table.  */
727   elt = XNEW (struct edge_to_cases_elt);
728   elt->e = e;
729   elt->case_labels = case_label;
730 
731   slot = htab_find_slot (edge_to_cases, elt, INSERT);
732 
733   if (*slot == NULL)
734     {
735       /* E was not in the hash table.  Install E into the hash table.  */
736       *slot = (void *)elt;
737     }
738   else
739     {
740       /* E was already in the hash table.  Free ELT as we do not need it
741 	 anymore.  */
742       free (elt);
743 
744       /* Get the entry stored in the hash table.  */
745       elt = (struct edge_to_cases_elt *) *slot;
746 
747       /* Add it to the chain of CASE_LABEL_EXPRs referencing E.  */
748       TREE_CHAIN (case_label) = elt->case_labels;
749       elt->case_labels = case_label;
750     }
751 }
752 
753 /* If we are inside a {start,end}_recording_cases block, then return
754    a chain of CASE_LABEL_EXPRs from T which reference E.
755 
756    Otherwise return NULL.  */
757 
758 static tree
759 get_cases_for_edge (edge e, tree t)
760 {
761   struct edge_to_cases_elt elt, *elt_p;
762   void **slot;
763   size_t i, n;
764   tree vec;
765 
766   /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
767      chains available.  Return NULL so the caller can detect this case.  */
768   if (!recording_case_labels_p ())
769     return NULL;
770 
771 restart:
772   elt.e = e;
773   elt.case_labels = NULL;
774   slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
775 
776   if (slot)
777     {
778       elt_p = (struct edge_to_cases_elt *)*slot;
779       return elt_p->case_labels;
780     }
781 
782   /* If we did not find E in the hash table, then this must be the first
783      time we have been queried for information about E & T.  Add all the
784      elements from T to the hash table then perform the query again.  */
785 
786   vec = SWITCH_LABELS (t);
787   n = TREE_VEC_LENGTH (vec);
788   for (i = 0; i < n; i++)
789     {
790       tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
791       basic_block label_bb = label_to_block (lab);
792       record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
793     }
794   goto restart;
795 }
796 
797 /* Create the edges for a SWITCH_EXPR starting at block BB.
798    At this point, the switch body has been lowered and the
799    SWITCH_LABELS filled in, so this is in effect a multi-way branch.  */
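
/* A sketch of the expected shape (illustrative only):

     switch (x_1) <default: <Ldef>, case 1: <L1>, case 2 ... 4: <L2>>

   The CASE_LABEL_EXPRs live in SWITCH_LABELS, so creating the edges is just
   a walk over that vector, adding one edge per case label.  */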
800 
801 static void
802 make_switch_expr_edges (basic_block bb)
803 {
804   tree entry = last_stmt (bb);
805   size_t i, n;
806   tree vec;
807 
808   vec = SWITCH_LABELS (entry);
809   n = TREE_VEC_LENGTH (vec);
810 
811   for (i = 0; i < n; ++i)
812     {
813       tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
814       basic_block label_bb = label_to_block (lab);
815       make_edge (bb, label_bb, 0);
816     }
817 }
818 
819 
820 /* Return the basic block holding label DEST.  */
821 
822 basic_block
823 label_to_block_fn (struct function *ifun, tree dest)
824 {
825   int uid = LABEL_DECL_UID (dest);
826 
827   /* We would die hard when faced with an undefined label.  Emit a label to
828      the very first basic block.  This will hopefully make even the dataflow
829      and undefined variable warnings quite right.  */
830   if ((errorcount || sorrycount) && uid < 0)
831     {
832       block_stmt_iterator bsi =
833 	bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
834       tree stmt;
835 
836       stmt = build1 (LABEL_EXPR, void_type_node, dest);
837       bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
838       uid = LABEL_DECL_UID (dest);
839     }
840   if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
841       <= (unsigned int) uid)
842     return NULL;
843   return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
844 }
845 
846 /* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
847    is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */
848 
849 void
850 make_abnormal_goto_edges (basic_block bb, bool for_call)
851 {
852   basic_block target_bb;
853   block_stmt_iterator bsi;
854 
855   FOR_EACH_BB (target_bb)
856     for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
857       {
858 	tree target = bsi_stmt (bsi);
859 
860 	if (TREE_CODE (target) != LABEL_EXPR)
861 	  break;
862 
863 	target = LABEL_EXPR_LABEL (target);
864 
865 	/* Make an edge to every label block that has been marked as a
866 	   potential target for a computed goto or a non-local goto.  */
867 	if ((FORCED_LABEL (target) && !for_call)
868 	    || (DECL_NONLOCAL (target) && for_call))
869 	  {
870 	    make_edge (bb, target_bb, EDGE_ABNORMAL);
871 	    break;
872 	  }
873       }
874 }
875 
876 /* Create edges for a goto statement at block BB.  */
877 
878 static void
879 make_goto_expr_edges (basic_block bb)
880 {
881   block_stmt_iterator last = bsi_last (bb);
882   tree goto_t = bsi_stmt (last);
883 
884   /* A simple GOTO creates normal edges.  */
885   if (simple_goto_p (goto_t))
886     {
887       tree dest = GOTO_DESTINATION (goto_t);
888       edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
889 #ifdef USE_MAPPED_LOCATION
890       e->goto_locus = EXPR_LOCATION (goto_t);
891 #else
892       e->goto_locus = EXPR_LOCUS (goto_t);
893 #endif
894       bsi_remove (&last, true);
895       return;
896     }
897 
898   /* A computed GOTO creates abnormal edges.  */
899   make_abnormal_goto_edges (bb, false);
900 }
901 
902 
903 /*---------------------------------------------------------------------------
904 			       Flowgraph analysis
905 ---------------------------------------------------------------------------*/
906 
907 /* Cleanup useless labels in basic blocks.  This is something we wish
908    to do early because it allows us to group case labels before creating
909    the edges for the CFG, and it speeds up block statement iterators in
910    all passes later on.
911    We only run this pass once; running it more than once is probably not
912    profitable.  */
913 
914 /* A map from basic block index to the leading label of that block.  */
915 static tree *label_for_bb;
916 
917 /* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */
918 static void
919 update_eh_label (struct eh_region *region)
920 {
921   tree old_label = get_eh_region_tree_label (region);
922   if (old_label)
923     {
924       tree new_label;
925       basic_block bb = label_to_block (old_label);
926 
927       /* ??? After optimizing, there may be EH regions with labels
928 	 that have already been removed from the function body, so
929 	 there is no basic block for them.  */
930       if (! bb)
931 	return;
932 
933       new_label = label_for_bb[bb->index];
934       set_eh_region_tree_label (region, new_label);
935     }
936 }
937 
938 /* Given LABEL return the first label in the same basic block.  */
939 static tree
940 main_block_label (tree label)
941 {
942   basic_block bb = label_to_block (label);
943 
944   /* label_to_block possibly inserted undefined label into the chain.  */
945   if (!label_for_bb[bb->index])
946     label_for_bb[bb->index] = label;
947   return label_for_bb[bb->index];
948 }
949 
950 /* Cleanup redundant labels.  This is a three-step process:
951      1) Find the leading label for each block.
952      2) Redirect all references to labels to the leading labels.
953      3) Cleanup all useless labels.  */
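
/* For example (sketch only), if a block starts with

     L1:;  L2:;  L3:;

   and L2 is the only user-defined label, then L2 is chosen as the leading
   label, every goto, switch case and EH region that referred to L1 or L3 is
   redirected to L2, and the now-unused artificial labels are deleted.  */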
954 
955 void
956 cleanup_dead_labels (void)
957 {
958   basic_block bb;
959   label_for_bb = XCNEWVEC (tree, last_basic_block);
960 
961   /* Find a suitable label for each block.  We use the first user-defined
962      label if there is one, or otherwise just the first label we see.  */
963   FOR_EACH_BB (bb)
964     {
965       block_stmt_iterator i;
966 
967       for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
968 	{
969 	  tree label, stmt = bsi_stmt (i);
970 
971 	  if (TREE_CODE (stmt) != LABEL_EXPR)
972 	    break;
973 
974 	  label = LABEL_EXPR_LABEL (stmt);
975 
976 	  /* If we have not yet seen a label for the current block,
977 	     remember this one and see if there are more labels.  */
978 	  if (! label_for_bb[bb->index])
979 	    {
980 	      label_for_bb[bb->index] = label;
981 	      continue;
982 	    }
983 
984 	  /* If we did see a label for the current block already, but it
985 	     is an artificially created label, replace it if the current
986 	     label is a user defined label.  */
987 	  if (! DECL_ARTIFICIAL (label)
988 	      && DECL_ARTIFICIAL (label_for_bb[bb->index]))
989 	    {
990 	      label_for_bb[bb->index] = label;
991 	      break;
992 	    }
993 	}
994     }
995 
996   /* Now redirect all jumps/branches to the selected label.
997      First do so for each block ending in a control statement.  */
998   FOR_EACH_BB (bb)
999     {
1000       tree stmt = last_stmt (bb);
1001       if (!stmt)
1002 	continue;
1003 
1004       switch (TREE_CODE (stmt))
1005 	{
1006 	case COND_EXPR:
1007 	  {
1008 	    tree true_branch, false_branch;
1009 
1010 	    true_branch = COND_EXPR_THEN (stmt);
1011 	    false_branch = COND_EXPR_ELSE (stmt);
1012 
1013 	    GOTO_DESTINATION (true_branch)
1014 	      = main_block_label (GOTO_DESTINATION (true_branch));
1015 	    GOTO_DESTINATION (false_branch)
1016 	      = main_block_label (GOTO_DESTINATION (false_branch));
1017 
1018 	    break;
1019 	  }
1020 
1021 	case SWITCH_EXPR:
1022 	  {
1023 	    size_t i;
1024 	    tree vec = SWITCH_LABELS (stmt);
1025 	    size_t n = TREE_VEC_LENGTH (vec);
1026 
1027 	    /* Replace all destination labels.  */
1028 	    for (i = 0; i < n; ++i)
1029 	      {
1030 		tree elt = TREE_VEC_ELT (vec, i);
1031 		tree label = main_block_label (CASE_LABEL (elt));
1032 		CASE_LABEL (elt) = label;
1033 	      }
1034 	    break;
1035 	  }
1036 
1037 	/* We have to handle GOTO_EXPRs until they're removed, and we don't
1038 	   remove them until after we've created the CFG edges.  */
1039 	case GOTO_EXPR:
1040           if (! computed_goto_p (stmt))
1041 	    {
1042 	      GOTO_DESTINATION (stmt)
1043 		= main_block_label (GOTO_DESTINATION (stmt));
1044 	      break;
1045 	    }
1046 
1047 	default:
1048 	  break;
1049       }
1050     }
1051 
1052   for_each_eh_region (update_eh_label);
1053 
1054   /* Finally, purge dead labels.  All user-defined labels and labels that
1055      can be the target of non-local gotos and labels which have their
1056      address taken are preserved.  */
1057   FOR_EACH_BB (bb)
1058     {
1059       block_stmt_iterator i;
1060       tree label_for_this_bb = label_for_bb[bb->index];
1061 
1062       if (! label_for_this_bb)
1063 	continue;
1064 
1065       for (i = bsi_start (bb); !bsi_end_p (i); )
1066 	{
1067 	  tree label, stmt = bsi_stmt (i);
1068 
1069 	  if (TREE_CODE (stmt) != LABEL_EXPR)
1070 	    break;
1071 
1072 	  label = LABEL_EXPR_LABEL (stmt);
1073 
1074 	  if (label == label_for_this_bb
1075 	      || ! DECL_ARTIFICIAL (label)
1076 	      || DECL_NONLOCAL (label)
1077 	      || FORCED_LABEL (label))
1078 	    bsi_next (&i);
1079 	  else
1080 	    bsi_remove (&i, true);
1081 	}
1082     }
1083 
1084   free (label_for_bb);
1085 }
1086 
1087 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1088    and scan the sorted vector of cases.  Combine the ones jumping to the
1089    same label.
1090    Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
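
/* A slightly fuller sketch (illustrative only): given the sorted vector

     case 1: <L>;  case 2: <L>;  case 3: <L>;  case 7: <Ldef>;  default: <Ldef>;

   the first three entries are merged into "case 1 ... 3: <L>", the case that
   jumps to the same place as the default is dropped, and the vector is then
   compacted to its new length.  */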
1091 
1092 void
1093 group_case_labels (void)
1094 {
1095   basic_block bb;
1096 
1097   FOR_EACH_BB (bb)
1098     {
1099       tree stmt = last_stmt (bb);
1100       if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1101 	{
1102 	  tree labels = SWITCH_LABELS (stmt);
1103 	  int old_size = TREE_VEC_LENGTH (labels);
1104 	  int i, j, new_size = old_size;
1105 	  tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1106 	  tree default_label;
1107 
1108 	  /* The default label is always the last case in a switch
1109 	     statement after gimplification.  */
1110 	  default_label = CASE_LABEL (default_case);
1111 
1112 	  /* Look for possible opportunities to merge cases.
1113 	     Ignore the last element of the label vector because it
1114 	     must be the default case.  */
1115           i = 0;
1116 	  while (i < old_size - 1)
1117 	    {
1118 	      tree base_case, base_label, base_high;
1119 	      base_case = TREE_VEC_ELT (labels, i);
1120 
1121 	      gcc_assert (base_case);
1122 	      base_label = CASE_LABEL (base_case);
1123 
1124 	      /* Discard cases that have the same destination as the
1125 		 default case.  */
1126 	      if (base_label == default_label)
1127 		{
1128 		  TREE_VEC_ELT (labels, i) = NULL_TREE;
1129 		  i++;
1130 		  new_size--;
1131 		  continue;
1132 		}
1133 
1134 	      base_high = CASE_HIGH (base_case) ?
1135 		CASE_HIGH (base_case) : CASE_LOW (base_case);
1136 	      i++;
1137 	      /* Try to merge case labels.  Break out when we reach the end
1138 		 of the label vector or when we cannot merge the next case
1139 		 label with the current one.  */
1140 	      while (i < old_size - 1)
1141 		{
1142 		  tree merge_case = TREE_VEC_ELT (labels, i);
1143 	          tree merge_label = CASE_LABEL (merge_case);
1144 		  tree t = int_const_binop (PLUS_EXPR, base_high,
1145 					    integer_one_node, 1);
1146 
1147 		  /* Merge the cases if they jump to the same place,
1148 		     and their ranges are consecutive.  */
1149 		  if (merge_label == base_label
1150 		      && tree_int_cst_equal (CASE_LOW (merge_case), t))
1151 		    {
1152 		      base_high = CASE_HIGH (merge_case) ?
1153 			CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1154 		      CASE_HIGH (base_case) = base_high;
1155 		      TREE_VEC_ELT (labels, i) = NULL_TREE;
1156 		      new_size--;
1157 		      i++;
1158 		    }
1159 		  else
1160 		    break;
1161 		}
1162 	    }
1163 
1164 	  /* Compress the case labels in the label vector, and adjust the
1165 	     length of the vector.  */
1166 	  for (i = 0, j = 0; i < new_size; i++)
1167 	    {
1168 	      while (! TREE_VEC_ELT (labels, j))
1169 		j++;
1170 	      TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1171 	    }
1172 	  TREE_VEC_LENGTH (labels) = new_size;
1173 	}
1174     }
1175 }
1176 
1177 /* Checks whether we can merge block B into block A.  */
1178 
1179 static bool
1180 tree_can_merge_blocks_p (basic_block a, basic_block b)
1181 {
1182   tree stmt;
1183   block_stmt_iterator bsi;
1184   tree phi;
1185 
1186   if (!single_succ_p (a))
1187     return false;
1188 
1189   if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1190     return false;
1191 
1192   if (single_succ (a) != b)
1193     return false;
1194 
1195   if (!single_pred_p (b))
1196     return false;
1197 
1198   if (b == EXIT_BLOCK_PTR)
1199     return false;
1200 
1201   /* If A ends by a statement causing exceptions or something similar, we
1202      cannot merge the blocks.  */
1203   stmt = last_stmt (a);
1204   if (stmt && stmt_ends_bb_p (stmt))
1205     return false;
1206 
1207   /* Do not allow a block with only a non-local label to be merged.  */
1208   if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1209       && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1210     return false;
1211 
1212   /* It must be possible to eliminate all phi nodes in B.  If ssa form
1213      is not up-to-date, we cannot eliminate any phis.  */
1214   phi = phi_nodes (b);
1215   if (phi)
1216     {
1217       if (need_ssa_update_p ())
1218 	return false;
1219 
1220       for (; phi; phi = PHI_CHAIN (phi))
1221 	if (!is_gimple_reg (PHI_RESULT (phi))
1222 	    && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1223 	  return false;
1224     }
1225 
1226   /* Do not remove user labels.  */
1227   for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1228     {
1229       stmt = bsi_stmt (bsi);
1230       if (TREE_CODE (stmt) != LABEL_EXPR)
1231 	break;
1232       if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1233 	return false;
1234     }
1235 
1236   /* Protect the loop latches.  */
1237   if (current_loops
1238       && b->loop_father->latch == b)
1239     return false;
1240 
1241   return true;
1242 }
1243 
1244 /* Replaces all uses of NAME by VAL.  */
1245 
1246 void
1247 replace_uses_by (tree name, tree val)
1248 {
1249   imm_use_iterator imm_iter;
1250   use_operand_p use;
1251   tree stmt;
1252   edge e;
1253   unsigned i;
1254 
1255 
1256   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1257     {
1258       FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1259         {
1260 	  replace_exp (use, val);
1261 
1262 	  if (TREE_CODE (stmt) == PHI_NODE)
1263 	    {
1264 	      e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1265 	      if (e->flags & EDGE_ABNORMAL)
1266 		{
1267 		  /* This can only occur for virtual operands, since
1268 		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1269 		     would prevent replacement.  */
1270 		  gcc_assert (!is_gimple_reg (name));
1271 		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1272 		}
1273 	    }
1274 	}
1275       if (TREE_CODE (stmt) != PHI_NODE)
1276 	{
1277 	  tree rhs;
1278 
1279 	  fold_stmt_inplace (stmt);
1280 	  rhs = get_rhs (stmt);
1281 	  if (TREE_CODE (rhs) == ADDR_EXPR)
1282 	    recompute_tree_invariant_for_addr_expr (rhs);
1283 
1284 	  maybe_clean_or_replace_eh_stmt (stmt, stmt);
1285 	  mark_new_vars_to_rename (stmt);
1286 	}
1287     }
1288 
1289   gcc_assert (num_imm_uses (name) == 0);
1290 
1291   /* Also update the trees stored in loop structures.  */
1292   if (current_loops)
1293     {
1294       struct loop *loop;
1295 
1296       for (i = 0; i < current_loops->num; i++)
1297 	{
1298 	  loop = current_loops->parray[i];
1299 	  if (loop)
1300 	    substitute_in_loop_info (loop, name, val);
1301 	}
1302     }
1303 }
1304 
1305 /* Merge block B into block A.  */
1306 
1307 static void
1308 tree_merge_blocks (basic_block a, basic_block b)
1309 {
1310   block_stmt_iterator bsi;
1311   tree_stmt_iterator last;
1312   tree phi;
1313 
1314   if (dump_file)
1315     fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1316 
1317   /* Remove all single-valued PHI nodes from block B of the form
1318      V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
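  /* Illustrative sketch only: because A is B's single predecessor, any PHI
     node in B is degenerate, e.g.

       x_3 = PHI <x_7(A)>

     and is eliminated either by rewriting every use of x_3 to x_7, or, when
     such propagation is not allowed, by emitting the copy "x_3 = x_7" at the
     end of A instead.  */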
1319   bsi = bsi_last (a);
1320   for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1321     {
1322       tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1323       tree copy;
1324       bool may_replace_uses = may_propagate_copy (def, use);
1325 
1326       /* In case we have loops to care about, do not propagate arguments of
1327 	 loop closed ssa phi nodes.  */
1328       if (current_loops
1329 	  && is_gimple_reg (def)
1330 	  && TREE_CODE (use) == SSA_NAME
1331 	  && a->loop_father != b->loop_father)
1332 	may_replace_uses = false;
1333 
1334       if (!may_replace_uses)
1335 	{
1336 	  gcc_assert (is_gimple_reg (def));
1337 
1338 	  /* Note that just emitting the copies is fine -- there is no problem
1339 	     with ordering of phi nodes.  This is because A is the single
1340 	     predecessor of B, therefore results of the phi nodes cannot
1341 	     appear as arguments of the phi nodes.  */
1342 	  copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1343 	  bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1344 	  SET_PHI_RESULT (phi, NULL_TREE);
1345 	  SSA_NAME_DEF_STMT (def) = copy;
1346 	}
1347       else
1348 	replace_uses_by (def, use);
1349 
1350       remove_phi_node (phi, NULL);
1351     }
1352 
1353   /* Ensure that B follows A.  */
1354   move_block_after (b, a);
1355 
1356   gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1357   gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1358 
1359   /* Remove labels from B and set bb_for_stmt to A for other statements.  */
1360   for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1361     {
1362       if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1363 	{
1364 	  tree label = bsi_stmt (bsi);
1365 
1366 	  bsi_remove (&bsi, false);
1367 	  /* Now that we can thread computed gotos, we might have
1368 	     a situation where we have a forced label in block B
1369 	     However, the label at the start of block B might still be
1370 	     used in other ways (think about the runtime checking for
1371 	     Fortran assigned gotos).  So we can not just delete the
1372 	     label.  Instead we move the label to the start of block A.  */
1373 	  if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1374 	    {
1375 	      block_stmt_iterator dest_bsi = bsi_start (a);
1376 	      bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1377 	    }
1378 	}
1379       else
1380 	{
1381 	  change_bb_for_stmt (bsi_stmt (bsi), a);
1382 	  bsi_next (&bsi);
1383 	}
1384     }
1385 
1386   /* Merge the chains.  */
1387   last = tsi_last (a->stmt_list);
1388   tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1389   b->stmt_list = NULL;
1390 }
1391 
1392 
1393 /* Return the one of two successors of BB that is not reached by a
1394    complex edge, if there is one.  Else, return BB.  We use
1395    this in optimizations that use post-dominators for their heuristics,
1396    to catch the cases in C++ where function calls are involved.  */
1397 
1398 basic_block
1399 single_noncomplex_succ (basic_block bb)
1400 {
1401   edge e0, e1;
1402   if (EDGE_COUNT (bb->succs) != 2)
1403     return bb;
1404 
1405   e0 = EDGE_SUCC (bb, 0);
1406   e1 = EDGE_SUCC (bb, 1);
1407   if (e0->flags & EDGE_COMPLEX)
1408     return e1->dest;
1409   if (e1->flags & EDGE_COMPLEX)
1410     return e0->dest;
1411 
1412   return bb;
1413 }
1414 
1415 
1416 /* Walk the function tree removing unnecessary statements.
1417 
1418      * Empty statement nodes are removed
1419 
1420      * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1421 
1422      * Unnecessary COND_EXPRs are removed
1423 
1424      * Some unnecessary BIND_EXPRs are removed
1425 
1426    Clearly more work could be done.  The trick is doing the analysis
1427    and removal fast enough to be a net improvement in compile times.
1428 
1429    Note that when we remove a control structure such as a COND_EXPR,
1430    BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1431    to ensure we eliminate all the useless code.  */
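
/* A few illustrative sketches (not actual dumps) of removals this pass makes:

     if (x) {} else {}              -->  (empty statement)
     try {} finally { cleanup; }    -->  cleanup;
     goto L; L:;                    -->  L:;

   Each removal may expose further opportunities, which is why the handlers
   set the REPEAT flag to request another iteration.  */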
1432 
1433 struct rus_data
1434 {
1435   tree *last_goto;
1436   bool repeat;
1437   bool may_throw;
1438   bool may_branch;
1439   bool has_label;
1440 };
1441 
1442 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1443 
1444 static bool
1445 remove_useless_stmts_warn_notreached (tree stmt)
1446 {
1447   if (EXPR_HAS_LOCATION (stmt))
1448     {
1449       location_t loc = EXPR_LOCATION (stmt);
1450       if (LOCATION_LINE (loc) > 0)
1451 	{
1452 	  warning (0, "%Hwill never be executed", &loc);
1453 	  return true;
1454 	}
1455     }
1456 
1457   switch (TREE_CODE (stmt))
1458     {
1459     case STATEMENT_LIST:
1460       {
1461 	tree_stmt_iterator i;
1462 	for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1463 	  if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1464 	    return true;
1465       }
1466       break;
1467 
1468     case COND_EXPR:
1469       if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1470 	return true;
1471       if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1472 	return true;
1473       if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1474 	return true;
1475       break;
1476 
1477     case TRY_FINALLY_EXPR:
1478     case TRY_CATCH_EXPR:
1479       if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1480 	return true;
1481       if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1482 	return true;
1483       break;
1484 
1485     case CATCH_EXPR:
1486       return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1487     case EH_FILTER_EXPR:
1488       return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1489     case BIND_EXPR:
1490       return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1491 
1492     default:
1493       /* Not a live container.  */
1494       break;
1495     }
1496 
1497   return false;
1498 }
1499 
1500 static void
1501 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1502 {
1503   tree then_clause, else_clause, cond;
1504   bool save_has_label, then_has_label, else_has_label;
1505 
1506   save_has_label = data->has_label;
1507   data->has_label = false;
1508   data->last_goto = NULL;
1509 
1510   remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1511 
1512   then_has_label = data->has_label;
1513   data->has_label = false;
1514   data->last_goto = NULL;
1515 
1516   remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1517 
1518   else_has_label = data->has_label;
1519   data->has_label = save_has_label | then_has_label | else_has_label;
1520 
1521   then_clause = COND_EXPR_THEN (*stmt_p);
1522   else_clause = COND_EXPR_ELSE (*stmt_p);
1523   cond = fold (COND_EXPR_COND (*stmt_p));
1524 
1525   /* If neither arm does anything at all, we can remove the whole IF.  */
1526   if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1527     {
1528       *stmt_p = build_empty_stmt ();
1529       data->repeat = true;
1530     }
1531 
1532   /* If there are no reachable statements in an arm, then we can
1533      zap the entire conditional.  */
1534   else if (integer_nonzerop (cond) && !else_has_label)
1535     {
1536       if (warn_notreached)
1537 	remove_useless_stmts_warn_notreached (else_clause);
1538       *stmt_p = then_clause;
1539       data->repeat = true;
1540     }
1541   else if (integer_zerop (cond) && !then_has_label)
1542     {
1543       if (warn_notreached)
1544 	remove_useless_stmts_warn_notreached (then_clause);
1545       *stmt_p = else_clause;
1546       data->repeat = true;
1547     }
1548 
1549   /* Check a couple of simple things on then/else with single stmts.  */
1550   else
1551     {
1552       tree then_stmt = expr_only (then_clause);
1553       tree else_stmt = expr_only (else_clause);
1554 
1555       /* Notice branches to a common destination.  */
1556       if (then_stmt && else_stmt
1557 	  && TREE_CODE (then_stmt) == GOTO_EXPR
1558 	  && TREE_CODE (else_stmt) == GOTO_EXPR
1559 	  && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1560 	{
1561 	  *stmt_p = then_stmt;
1562 	  data->repeat = true;
1563 	}
1564 
1565       /* If the THEN/ELSE clause merely assigns a value to a variable or
1566 	 parameter which is already known to contain that value, then
1567 	 remove the useless THEN/ELSE clause.  */
1568       else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1569 	{
1570 	  if (else_stmt
1571 	      && TREE_CODE (else_stmt) == MODIFY_EXPR
1572 	      && TREE_OPERAND (else_stmt, 0) == cond
1573 	      && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1574 	    COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1575 	}
1576       else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1577 	       && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1578 		   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1579 	       && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1580 	{
1581 	  tree stmt = (TREE_CODE (cond) == EQ_EXPR
1582 		       ? then_stmt : else_stmt);
1583 	  tree *location = (TREE_CODE (cond) == EQ_EXPR
1584 			    ? &COND_EXPR_THEN (*stmt_p)
1585 			    : &COND_EXPR_ELSE (*stmt_p));
1586 
1587 	  if (stmt
1588 	      && TREE_CODE (stmt) == MODIFY_EXPR
1589 	      && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1590 	      && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1591 	    *location = alloc_stmt_list ();
1592 	}
1593     }
1594 
1595   /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
1596      would be re-introduced during lowering.  */
1597   data->last_goto = NULL;
1598 }
1599 
1600 
1601 static void
1602 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1603 {
1604   bool save_may_branch, save_may_throw;
1605   bool this_may_branch, this_may_throw;
1606 
1607   /* Collect may_branch and may_throw information for the body only.  */
1608   save_may_branch = data->may_branch;
1609   save_may_throw = data->may_throw;
1610   data->may_branch = false;
1611   data->may_throw = false;
1612   data->last_goto = NULL;
1613 
1614   remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1615 
1616   this_may_branch = data->may_branch;
1617   this_may_throw = data->may_throw;
1618   data->may_branch |= save_may_branch;
1619   data->may_throw |= save_may_throw;
1620   data->last_goto = NULL;
1621 
1622   remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1623 
1624   /* If the body is empty, then we can emit the FINALLY block without
1625      the enclosing TRY_FINALLY_EXPR.  */
1626   if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1627     {
1628       *stmt_p = TREE_OPERAND (*stmt_p, 1);
1629       data->repeat = true;
1630     }
1631 
1632   /* If the handler is empty, then we can emit the TRY block without
1633      the enclosing TRY_FINALLY_EXPR.  */
1634   else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1635     {
1636       *stmt_p = TREE_OPERAND (*stmt_p, 0);
1637       data->repeat = true;
1638     }
1639 
1640   /* If the body neither throws, nor branches, then we can safely
1641      string the TRY and FINALLY blocks together.  */
1642   else if (!this_may_branch && !this_may_throw)
1643     {
1644       tree stmt = *stmt_p;
1645       *stmt_p = TREE_OPERAND (stmt, 0);
1646       append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1647       data->repeat = true;
1648     }
1649 }
1650 
1651 
1652 static void
1653 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1654 {
1655   bool save_may_throw, this_may_throw;
1656   tree_stmt_iterator i;
1657   tree stmt;
1658 
1659   /* Collect may_throw information for the body only.  */
1660   save_may_throw = data->may_throw;
1661   data->may_throw = false;
1662   data->last_goto = NULL;
1663 
1664   remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1665 
1666   this_may_throw = data->may_throw;
1667   data->may_throw = save_may_throw;
1668 
1669   /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
1670   if (!this_may_throw)
1671     {
1672       if (warn_notreached)
1673 	remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1674       *stmt_p = TREE_OPERAND (*stmt_p, 0);
1675       data->repeat = true;
1676       return;
1677     }
1678 
1679   /* Process the catch clause specially.  We may be able to tell that
1680      no exceptions propagate past this point.  */
1681 
1682   this_may_throw = true;
1683   i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1684   stmt = tsi_stmt (i);
1685   data->last_goto = NULL;
1686 
1687   switch (TREE_CODE (stmt))
1688     {
1689     case CATCH_EXPR:
1690       for (; !tsi_end_p (i); tsi_next (&i))
1691 	{
1692 	  stmt = tsi_stmt (i);
1693 	  /* If we catch all exceptions, then the body does not
1694 	     propagate exceptions past this point.  */
1695 	  if (CATCH_TYPES (stmt) == NULL)
1696 	    this_may_throw = false;
1697 	  data->last_goto = NULL;
1698 	  remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1699 	}
1700       break;
1701 
1702     case EH_FILTER_EXPR:
1703       if (EH_FILTER_MUST_NOT_THROW (stmt))
1704 	this_may_throw = false;
1705       else if (EH_FILTER_TYPES (stmt) == NULL)
1706 	this_may_throw = false;
1707       remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1708       break;
1709 
1710     default:
1711       /* Otherwise this is a cleanup.  */
1712       remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1713 
1714       /* If the cleanup is empty, then we can emit the TRY block without
1715 	 the enclosing TRY_CATCH_EXPR.  */
1716       if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1717 	{
1718 	  *stmt_p = TREE_OPERAND (*stmt_p, 0);
1719 	  data->repeat = true;
1720 	}
1721       break;
1722     }
1723   data->may_throw |= this_may_throw;
1724 }
1725 
1726 
1727 static void
1728 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1729 {
1730   tree block;
1731 
1732   /* First remove anything underneath the BIND_EXPR.  */
1733   remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1734 
1735   /* If the BIND_EXPR has no variables, then we can pull everything
1736      up one level and remove the BIND_EXPR, unless this is the toplevel
1737      BIND_EXPR for the current function or an inlined function.
1738 
1739      When this situation occurs we will want to apply this
1740      optimization again.  */
1741   block = BIND_EXPR_BLOCK (*stmt_p);
1742   if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1743       && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1744       && (! block
1745 	  || ! BLOCK_ABSTRACT_ORIGIN (block)
1746 	  || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1747 	      != FUNCTION_DECL)))
1748     {
1749       *stmt_p = BIND_EXPR_BODY (*stmt_p);
1750       data->repeat = true;
1751     }
1752 }
1753 
1754 
1755 static void
1756 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1757 {
1758   tree dest = GOTO_DESTINATION (*stmt_p);
1759 
1760   data->may_branch = true;
1761   data->last_goto = NULL;
1762 
1763   /* Record the last goto expr, so that we can delete it if unnecessary.  */
1764   if (TREE_CODE (dest) == LABEL_DECL)
1765     data->last_goto = stmt_p;
1766 }
1767 
1768 
1769 static void
1770 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1771 {
1772   tree label = LABEL_EXPR_LABEL (*stmt_p);
1773 
1774   data->has_label = true;
1775 
1776   /* We do want to jump across non-local label receiver code.  */
1777   if (DECL_NONLOCAL (label))
1778     data->last_goto = NULL;
1779 
1780   else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1781     {
1782       *data->last_goto = build_empty_stmt ();
1783       data->repeat = true;
1784     }
1785 
1786   /* ??? Add something here to delete unused labels.  */
1787 }
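
/* Example (added for illustration; not part of the original source): the
   goto/label pair below is one of the patterns the helpers above remove.
   remove_useless_stmts_goto records the goto in data->last_goto;
   remove_useless_stmts_label then notices that the goto's destination is the
   label immediately following it and smashes the goto into an empty
   statement, which the STATEMENT_LIST case of remove_useless_stmts_1
   delinks on the repeat pass:

	goto <D.1234>;
      <D.1234>:
	x = 1;

   The label name is a placeholder invented for this example.  */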
1788 
1789 
1790 /* If the function called by CALL is "const" or "pure", then clear
1791    TREE_SIDE_EFFECTS on the call.  This allows us to eliminate redundant
1792    or useless calls to "const" functions.
1793 
1794    The gimplifier already performs the same operation, but we may notice
1795    that functions are const or pure only after their calls have been
1796    gimplified, so we need to update the flag here.  */
1797 
1798 static void
1799 update_call_expr_flags (tree call)
1800 {
1801   tree decl = get_callee_fndecl (call);
1802   if (!decl)
1803     return;
1804   if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1805     TREE_SIDE_EFFECTS (call) = 0;
1806   if (TREE_NOTHROW (decl))
1807     TREE_NOTHROW (call) = 1;
1808 }
1809 
1810 
1811 /* T is a CALL_EXPR.  Set the current_function_calls_* flags.  */
1812 
1813 void
1814 notice_special_calls (tree t)
1815 {
1816   int flags = call_expr_flags (t);
1817 
1818   if (flags & ECF_MAY_BE_ALLOCA)
1819     current_function_calls_alloca = true;
1820   if (flags & ECF_RETURNS_TWICE)
1821     current_function_calls_setjmp = true;
1822 }
1823 
1824 
1825 /* Clear flags set by notice_special_calls.  Used by dead code removal
1826    to update the flags.  */
1827 
1828 void
1829 clear_special_calls (void)
1830 {
1831   current_function_calls_alloca = false;
1832   current_function_calls_setjmp = false;
1833 }
1834 
1835 
1836 static void
1837 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1838 {
1839   tree t = *tp, op;
1840 
1841   switch (TREE_CODE (t))
1842     {
1843     case COND_EXPR:
1844       remove_useless_stmts_cond (tp, data);
1845       break;
1846 
1847     case TRY_FINALLY_EXPR:
1848       remove_useless_stmts_tf (tp, data);
1849       break;
1850 
1851     case TRY_CATCH_EXPR:
1852       remove_useless_stmts_tc (tp, data);
1853       break;
1854 
1855     case BIND_EXPR:
1856       remove_useless_stmts_bind (tp, data);
1857       break;
1858 
1859     case GOTO_EXPR:
1860       remove_useless_stmts_goto (tp, data);
1861       break;
1862 
1863     case LABEL_EXPR:
1864       remove_useless_stmts_label (tp, data);
1865       break;
1866 
1867     case RETURN_EXPR:
1868       fold_stmt (tp);
1869       data->last_goto = NULL;
1870       data->may_branch = true;
1871       break;
1872 
1873     case CALL_EXPR:
1874       fold_stmt (tp);
1875       data->last_goto = NULL;
1876       notice_special_calls (t);
1877       update_call_expr_flags (t);
1878       if (tree_could_throw_p (t))
1879 	data->may_throw = true;
1880       break;
1881 
1882     case MODIFY_EXPR:
1883       data->last_goto = NULL;
1884       fold_stmt (tp);
1885       op = get_call_expr_in (t);
1886       if (op)
1887 	{
1888 	  update_call_expr_flags (op);
1889 	  notice_special_calls (op);
1890 	}
1891       if (tree_could_throw_p (t))
1892 	data->may_throw = true;
1893       break;
1894 
1895     case STATEMENT_LIST:
1896       {
1897 	tree_stmt_iterator i = tsi_start (t);
1898 	while (!tsi_end_p (i))
1899 	  {
1900 	    t = tsi_stmt (i);
1901 	    if (IS_EMPTY_STMT (t))
1902 	      {
1903 		tsi_delink (&i);
1904 		continue;
1905 	      }
1906 
1907 	    remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1908 
1909 	    t = tsi_stmt (i);
1910 	    if (TREE_CODE (t) == STATEMENT_LIST)
1911 	      {
1912 		tsi_link_before (&i, t, TSI_SAME_STMT);
1913 		tsi_delink (&i);
1914 	      }
1915 	    else
1916 	      tsi_next (&i);
1917 	  }
1918       }
1919       break;
1920     case ASM_EXPR:
1921       fold_stmt (tp);
1922       data->last_goto = NULL;
1923       break;
1924 
1925     default:
1926       data->last_goto = NULL;
1927       break;
1928     }
1929 }
1930 
1931 static unsigned int
1932 remove_useless_stmts (void)
1933 {
1934   struct rus_data data;
1935 
1936   clear_special_calls ();
1937 
1938   do
1939     {
1940       memset (&data, 0, sizeof (data));
1941       remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1942     }
1943   while (data.repeat);
1944   return 0;
1945 }
1946 
1947 
1948 struct tree_opt_pass pass_remove_useless_stmts =
1949 {
1950   "useless",				/* name */
1951   NULL,					/* gate */
1952   remove_useless_stmts,			/* execute */
1953   NULL,					/* sub */
1954   NULL,					/* next */
1955   0,					/* static_pass_number */
1956   0,					/* tv_id */
1957   PROP_gimple_any,			/* properties_required */
1958   0,					/* properties_provided */
1959   0,					/* properties_destroyed */
1960   0,					/* todo_flags_start */
1961   TODO_dump_func,			/* todo_flags_finish */
1962   0					/* letter */
1963 };
1964 
1965 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
1966 
1967 static void
1968 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1969 {
1970   tree phi;
1971 
1972   /* Since this block is no longer reachable, we can just delete all
1973      of its PHI nodes.  */
1974   phi = phi_nodes (bb);
1975   while (phi)
1976     {
1977       tree next = PHI_CHAIN (phi);
1978       remove_phi_node (phi, NULL_TREE);
1979       phi = next;
1980     }
1981 
1982   /* Remove edges to BB's successors.  */
1983   while (EDGE_COUNT (bb->succs) > 0)
1984     remove_edge (EDGE_SUCC (bb, 0));
1985 }
1986 
1987 
1988 /* Remove statements of basic block BB.  */
1989 
1990 static void
1991 remove_bb (basic_block bb)
1992 {
1993   block_stmt_iterator i;
1994 #ifdef USE_MAPPED_LOCATION
1995   source_location loc = UNKNOWN_LOCATION;
1996 #else
1997   source_locus loc = 0;
1998 #endif
1999 
2000   if (dump_file)
2001     {
2002       fprintf (dump_file, "Removing basic block %d\n", bb->index);
2003       if (dump_flags & TDF_DETAILS)
2004 	{
2005 	  dump_bb (bb, dump_file, 0);
2006 	  fprintf (dump_file, "\n");
2007 	}
2008     }
2009 
2010   /* If we remove the header or the latch of a loop, mark the loop for
2011      removal by setting its header and latch to NULL.  */
2012   if (current_loops)
2013     {
2014       struct loop *loop = bb->loop_father;
2015 
2016       if (loop->latch == bb
2017 	  || loop->header == bb)
2018 	{
2019 	  loop->latch = NULL;
2020 	  loop->header = NULL;
2021 
2022 	  /* Also clean up the information associated with the loop.  Updating
2023 	     it would waste time.  More importantly, it may refer to SSA
2024 	     names that were defined in other removed basic blocks -- those
2025 	     SSA names are now removed and invalid.  */
2026 	  free_numbers_of_iterations_estimates_loop (loop);
2027 	}
2028     }
2029 
2030   /* Remove all the instructions in the block.  */
2031   for (i = bsi_start (bb); !bsi_end_p (i);)
2032     {
2033       tree stmt = bsi_stmt (i);
2034       if (TREE_CODE (stmt) == LABEL_EXPR
2035           && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2036 	      || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2037 	{
2038 	  basic_block new_bb;
2039 	  block_stmt_iterator new_bsi;
2040 
2041 	  /* A non-reachable non-local label may still be referenced.
2042 	     But it no longer needs to carry the extra semantics of
2043 	     non-locality.  */
2044 	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2045 	    {
2046 	      DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2047 	      FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2048 	    }
2049 
2050 	  new_bb = bb->prev_bb;
2051 	  new_bsi = bsi_start (new_bb);
2052 	  bsi_remove (&i, false);
2053 	  bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2054 	}
2055       else
2056         {
2057 	  /* Release SSA definitions if we are in SSA.  Note that we
2058 	     may be called when not in SSA.  For example,
2059 	     final_cleanup calls this function via
2060 	     cleanup_tree_cfg.  */
2061 	  if (in_ssa_p)
2062 	    release_defs (stmt);
2063 
2064 	  bsi_remove (&i, true);
2065 	}
2066 
2067       /* Don't warn for removed gotos.  Gotos are often removed due to
2068 	 jump threading, thus resulting in bogus warnings.  Not great,
2069 	 since this way we lose warnings for gotos in the original
2070 	 program that are indeed unreachable.  */
2071       if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2072 	{
2073 #ifdef USE_MAPPED_LOCATION
2074 	  if (EXPR_HAS_LOCATION (stmt))
2075 	    loc = EXPR_LOCATION (stmt);
2076 #else
2077 	  source_locus t;
2078 	  t = EXPR_LOCUS (stmt);
2079 	  if (t && LOCATION_LINE (*t) > 0)
2080 	    loc = t;
2081 #endif
2082 	}
2083     }
2084 
2085   /* If requested, give a warning that the first statement in the
2086      block is unreachable.  The loop above records the location of the
2087      first statement (other than a removed goto) that carries one, so
2088      the warning refers to the start of the block.  */
2089 #ifdef USE_MAPPED_LOCATION
2090   if (loc > BUILTINS_LOCATION)
2091     warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2092 #else
2093   if (loc)
2094     warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2095 #endif
2096 
2097   remove_phi_nodes_and_edges_for_unreachable_block (bb);
2098 }
2099 
2100 
2101 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2102    predicate VAL, return the edge that will be taken out of the block.
2103    If VAL does not match a unique edge, NULL is returned.  */
2104 
2105 edge
2106 find_taken_edge (basic_block bb, tree val)
2107 {
2108   tree stmt;
2109 
2110   stmt = last_stmt (bb);
2111 
2112   gcc_assert (stmt);
2113   gcc_assert (is_ctrl_stmt (stmt));
2114   gcc_assert (val);
2115 
2116   if (! is_gimple_min_invariant (val))
2117     return NULL;
2118 
2119   if (TREE_CODE (stmt) == COND_EXPR)
2120     return find_taken_edge_cond_expr (bb, val);
2121 
2122   if (TREE_CODE (stmt) == SWITCH_EXPR)
2123     return find_taken_edge_switch_expr (bb, val);
2124 
2125   if (computed_goto_p (stmt))
2126     {
2127       /* Only optimize if the argument is a label; if the argument is
2128 	 not a label then we cannot construct a proper CFG.
2129 
2130          It may be the case that we only need to allow the LABEL_REF to
2131          appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2132          appear inside a LABEL_EXPR just to be safe.  */
2133       if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2134 	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2135 	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2136       return NULL;
2137     }
2138 
2139   gcc_unreachable ();
2140 }
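
/* Illustrative sketch, not part of the original file: a typical use of
   find_taken_edge once the controlling predicate of BB has been folded to
   a constant.  The helper name example_taken_successor is an assumption
   made only for this example.  */

static basic_block
example_taken_successor (basic_block bb, tree folded_val)
{
  edge e = find_taken_edge (bb, folded_val);

  /* A NULL result means FOLDED_VAL does not select a unique successor.  */
  return e ? e->dest : NULL;
}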
2141 
2142 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2143    statement, determine which of the outgoing edges will be taken out of the
2144    block.  Return NULL if any edge may be taken.  */
2145 
2146 static edge
2147 find_taken_edge_computed_goto (basic_block bb, tree val)
2148 {
2149   basic_block dest;
2150   edge e = NULL;
2151 
2152   dest = label_to_block (val);
2153   if (dest)
2154     {
2155       e = find_edge (bb, dest);
2156       gcc_assert (e != NULL);
2157     }
2158 
2159   return e;
2160 }
2161 
2162 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2163    statement, determine which of the two edges will be taken out of the
2164    block.  Return NULL if either edge may be taken.  */
2165 
2166 static edge
2167 find_taken_edge_cond_expr (basic_block bb, tree val)
2168 {
2169   edge true_edge, false_edge;
2170 
2171   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2172 
2173   gcc_assert (TREE_CODE (val) == INTEGER_CST);
2174   return (zero_p (val) ? false_edge : true_edge);
2175 }
2176 
2177 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2178    statement, determine which edge will be taken out of the block.  Return
2179    NULL if any edge may be taken.  */
2180 
2181 static edge
2182 find_taken_edge_switch_expr (basic_block bb, tree val)
2183 {
2184   tree switch_expr, taken_case;
2185   basic_block dest_bb;
2186   edge e;
2187 
2188   switch_expr = last_stmt (bb);
2189   taken_case = find_case_label_for_value (switch_expr, val);
2190   dest_bb = label_to_block (CASE_LABEL (taken_case));
2191 
2192   e = find_edge (bb, dest_bb);
2193   gcc_assert (e);
2194   return e;
2195 }
2196 
2197 
2198 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2199    We can make optimal use here of the fact that the case labels are
2200    sorted: We can do a binary search for a case matching VAL.  */
2201 
2202 static tree
2203 find_case_label_for_value (tree switch_expr, tree val)
2204 {
2205   tree vec = SWITCH_LABELS (switch_expr);
2206   size_t low, high, n = TREE_VEC_LENGTH (vec);
2207   tree default_case = TREE_VEC_ELT (vec, n - 1);
2208 
2209   for (low = -1, high = n - 1; high - low > 1; )
2210     {
2211       size_t i = (high + low) / 2;
2212       tree t = TREE_VEC_ELT (vec, i);
2213       int cmp;
2214 
2215       /* Cache the result of comparing CASE_LOW and val.  */
2216       cmp = tree_int_cst_compare (CASE_LOW (t), val);
2217 
2218       if (cmp > 0)
2219 	high = i;
2220       else
2221 	low = i;
2222 
2223       if (CASE_HIGH (t) == NULL)
2224 	{
2225 	  /* A single-valued case label.  */
2226 	  if (cmp == 0)
2227 	    return t;
2228 	}
2229       else
2230 	{
2231 	  /* A case range.  We can only handle integer ranges.  */
2232 	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2233 	    return t;
2234 	}
2235     }
2236 
2237   return default_case;
2238 }
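
/* Illustrative sketch, not part of the original file: checking whether VAL
   would land on the default label.  As in the function above, the default
   case is stored as the last element of SWITCH_LABELS.  The helper name is
   an assumption made only for this example.  */

static bool
example_value_hits_default_p (tree switch_expr, tree val)
{
  tree vec = SWITCH_LABELS (switch_expr);
  tree default_case = TREE_VEC_ELT (vec, TREE_VEC_LENGTH (vec) - 1);

  return find_case_label_for_value (switch_expr, val) == default_case;
}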
2239 
2240 
2241 
2242 
2243 /*---------------------------------------------------------------------------
2244 			      Debugging functions
2245 ---------------------------------------------------------------------------*/
2246 
2247 /* Dump tree-specific information of block BB to file OUTF.  */
2248 
2249 void
2250 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2251 {
2252   dump_generic_bb (outf, bb, indent, TDF_VOPS);
2253 }
2254 
2255 
2256 /* Dump a basic block on stderr.  */
2257 
2258 void
2259 debug_tree_bb (basic_block bb)
2260 {
2261   dump_bb (bb, stderr, 0);
2262 }
2263 
2264 
2265 /* Dump basic block with index N on stderr.  */
2266 
2267 basic_block
2268 debug_tree_bb_n (int n)
2269 {
2270   debug_tree_bb (BASIC_BLOCK (n));
2271   return BASIC_BLOCK (n);
2272 }
2273 
2274 
2275 /* Dump the CFG on stderr.
2276 
2277    FLAGS are the same used by the tree dumping functions
2278    (see TDF_* in tree-pass.h).  */
2279 
2280 void
2281 debug_tree_cfg (int flags)
2282 {
2283   dump_tree_cfg (stderr, flags);
2284 }
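
/* Example (illustrative): these debug_* entry points are intended to be
   called by hand from a debugger session, e.g.

     (gdb) call debug_tree_bb_n (3)
     (gdb) call debug_tree_cfg (0)

   passing the desired TDF_* flags as the argument to dump a single block
   or the whole CFG to stderr.  */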
2285 
2286 
2287 /* Dump the program showing basic block boundaries on the given FILE.
2288 
2289    FLAGS are the same used by the tree dumping functions (see TDF_* in
2290    tree.h).  */
2291 
2292 void
2293 dump_tree_cfg (FILE *file, int flags)
2294 {
2295   if (flags & TDF_DETAILS)
2296     {
2297       const char *funcname
2298 	= lang_hooks.decl_printable_name (current_function_decl, 2);
2299 
2300       fputc ('\n', file);
2301       fprintf (file, ";; Function %s\n\n", funcname);
2302       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2303 	       n_basic_blocks, n_edges, last_basic_block);
2304 
2305       brief_dump_cfg (file);
2306       fprintf (file, "\n");
2307     }
2308 
2309   if (flags & TDF_STATS)
2310     dump_cfg_stats (file);
2311 
2312   dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2313 }
2314 
2315 
2316 /* Dump CFG statistics on FILE.  */
2317 
2318 void
2319 dump_cfg_stats (FILE *file)
2320 {
2321   static long max_num_merged_labels = 0;
2322   unsigned long size, total = 0;
2323   long num_edges;
2324   basic_block bb;
2325   const char * const fmt_str   = "%-30s%-13s%12s\n";
2326   const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2327   const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2328   const char * const fmt_str_3 = "%-43s%11lu%c\n";
2329   const char *funcname
2330     = lang_hooks.decl_printable_name (current_function_decl, 2);
2331 
2332 
2333   fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2334 
2335   fprintf (file, "---------------------------------------------------------\n");
2336   fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2337   fprintf (file, fmt_str, "", "  instances  ", "used ");
2338   fprintf (file, "---------------------------------------------------------\n");
2339 
2340   size = n_basic_blocks * sizeof (struct basic_block_def);
2341   total += size;
2342   fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2343 	   SCALE (size), LABEL (size));
2344 
2345   num_edges = 0;
2346   FOR_EACH_BB (bb)
2347     num_edges += EDGE_COUNT (bb->succs);
2348   size = num_edges * sizeof (struct edge_def);
2349   total += size;
2350   fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2351 
2352   fprintf (file, "---------------------------------------------------------\n");
2353   fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2354 	   LABEL (total));
2355   fprintf (file, "---------------------------------------------------------\n");
2356   fprintf (file, "\n");
2357 
2358   if (cfg_stats.num_merged_labels > max_num_merged_labels)
2359     max_num_merged_labels = cfg_stats.num_merged_labels;
2360 
2361   fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2362 	   cfg_stats.num_merged_labels, max_num_merged_labels);
2363 
2364   fprintf (file, "\n");
2365 }
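
/* Example (illustrative): dump_cfg_stats is normally reached through
   dump_tree_cfg (file, flags) with TDF_STATS set, as shown above, or
   invoked directly from a debugger via debug_cfg_stats () below.  */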
2366 
2367 
2368 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2369    linked in the final executable.  */
2370 
2371 void
2372 debug_cfg_stats (void)
2373 {
2374   dump_cfg_stats (stderr);
2375 }
2376 
2377 
2378 /* Dump the flowgraph to a .vcg FILE.  */
2379 
2380 static void
2381 tree_cfg2vcg (FILE *file)
2382 {
2383   edge e;
2384   edge_iterator ei;
2385   basic_block bb;
2386   const char *funcname
2387     = lang_hooks.decl_printable_name (current_function_decl, 2);
2388 
2389   /* Write the file header.  */
2390   fprintf (file, "graph: { title: \"%s\"\n", funcname);
2391   fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2392   fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2393 
2394   /* Write blocks and edges.  */
2395   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2396     {
2397       fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2398 	       e->dest->index);
2399 
2400       if (e->flags & EDGE_FAKE)
2401 	fprintf (file, " linestyle: dotted priority: 10");
2402       else
2403 	fprintf (file, " linestyle: solid priority: 100");
2404 
2405       fprintf (file, " }\n");
2406     }
2407   fputc ('\n', file);
2408 
2409   FOR_EACH_BB (bb)
2410     {
2411       enum tree_code head_code, end_code;
2412       const char *head_name, *end_name;
2413       int head_line = 0;
2414       int end_line = 0;
2415       tree first = first_stmt (bb);
2416       tree last = last_stmt (bb);
2417 
2418       if (first)
2419 	{
2420 	  head_code = TREE_CODE (first);
2421 	  head_name = tree_code_name[head_code];
2422 	  head_line = get_lineno (first);
2423 	}
2424       else
2425 	head_name = "no-statement";
2426 
2427       if (last)
2428 	{
2429 	  end_code = TREE_CODE (last);
2430 	  end_name = tree_code_name[end_code];
2431 	  end_line = get_lineno (last);
2432 	}
2433       else
2434 	end_name = "no-statement";
2435 
2436       fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2437 	       bb->index, bb->index, head_name, head_line, end_name,
2438 	       end_line);
2439 
2440       FOR_EACH_EDGE (e, ei, bb->succs)
2441 	{
2442 	  if (e->dest == EXIT_BLOCK_PTR)
2443 	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2444 	  else
2445 	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2446 
2447 	  if (e->flags & EDGE_FAKE)
2448 	    fprintf (file, " priority: 10 linestyle: dotted");
2449 	  else
2450 	    fprintf (file, " priority: 100 linestyle: solid");
2451 
2452 	  fprintf (file, " }\n");
2453 	}
2454 
2455       if (bb->next_bb != EXIT_BLOCK_PTR)
2456 	fputc ('\n', file);
2457     }
2458 
2459   fputs ("}\n\n", file);
2460 }
2461 
2462 
2463 
2464 /*---------------------------------------------------------------------------
2465 			     Miscellaneous helpers
2466 ---------------------------------------------------------------------------*/
2467 
2468 /* Return true if T represents a stmt that always transfers control.  */
2469 
2470 bool
2471 is_ctrl_stmt (tree t)
2472 {
2473   return (TREE_CODE (t) == COND_EXPR
2474 	  || TREE_CODE (t) == SWITCH_EXPR
2475 	  || TREE_CODE (t) == GOTO_EXPR
2476 	  || TREE_CODE (t) == RETURN_EXPR
2477 	  || TREE_CODE (t) == RESX_EXPR);
2478 }
2479 
2480 
2481 /* Return true if T is a statement that may alter the flow of control
2482    (e.g., a call to a non-returning function).  */
2483 
2484 bool
2485 is_ctrl_altering_stmt (tree t)
2486 {
2487   tree call;
2488 
2489   gcc_assert (t);
2490   call = get_call_expr_in (t);
2491   if (call)
2492     {
2493       /* A non-pure/const CALL_EXPR alters flow control if the current
2494 	 function has nonlocal labels.  */
2495       if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2496 	return true;
2497 
2498       /* A CALL_EXPR also alters control flow if it does not return.  */
2499       if (call_expr_flags (call) & ECF_NORETURN)
2500 	return true;
2501     }
2502 
2503   /* OpenMP directives alter control flow.  */
2504   if (OMP_DIRECTIVE_P (t))
2505     return true;
2506 
2507   /* If a statement can throw, it alters control flow.  */
2508   return tree_can_throw_internal (t);
2509 }
2510 
2511 
2512 /* Return true if T is a computed goto.  */
2513 
2514 bool
2515 computed_goto_p (tree t)
2516 {
2517   return (TREE_CODE (t) == GOTO_EXPR
2518 	  && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2519 }
2520 
2521 
2522 /* Return true if T is a simple local goto.  */
2523 
2524 bool
2525 simple_goto_p (tree t)
2526 {
2527   return (TREE_CODE (t) == GOTO_EXPR
2528 	  && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2529 }
2530 
2531 
2532 /* Return true if T can make an abnormal transfer of control flow.
2533    Transfers of control flow associated with EH are excluded.  */
2534 
2535 bool
2536 tree_can_make_abnormal_goto (tree t)
2537 {
2538   if (computed_goto_p (t))
2539     return true;
2540   if (TREE_CODE (t) == MODIFY_EXPR)
2541     t = TREE_OPERAND (t, 1);
2542   if (TREE_CODE (t) == WITH_SIZE_EXPR)
2543     t = TREE_OPERAND (t, 0);
2544   if (TREE_CODE (t) == CALL_EXPR)
2545     return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
2546   return false;
2547 }
2548 
2549 
2550 /* Return true if T should start a new basic block.  PREV_T is the
2551    statement preceding T.  It is used when T is a label or a case label.
2552    Labels should only start a new basic block if their previous statement
2553    wasn't a label.  Otherwise, sequences of labels would generate
2554    unnecessary basic blocks that contain only a single label.  */
2555 
2556 static inline bool
2557 stmt_starts_bb_p (tree t, tree prev_t)
2558 {
2559   if (t == NULL_TREE)
2560     return false;
2561 
2562   /* LABEL_EXPRs start a new basic block only if the preceding
2563      statement wasn't a label of the same type.  This prevents the
2564      creation of consecutive blocks that have nothing but a single
2565      label.  */
2566   if (TREE_CODE (t) == LABEL_EXPR)
2567     {
2568       /* Nonlocal and computed GOTO targets always start a new block.  */
2569       if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2570 	  || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2571 	return true;
2572 
2573       if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2574 	{
2575 	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2576 	    return true;
2577 
2578 	  cfg_stats.num_merged_labels++;
2579 	  return false;
2580 	}
2581       else
2582 	return true;
2583     }
2584 
2585   return false;
2586 }
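
/* Example (added for illustration): for the GIMPLE sequence

     L1:
     L2:
       x = 1;

   stmt_starts_bb_p returns true for L1 (its predecessor is not a label)
   and false for L2, so both labels end up in the same basic block and
   cfg_stats.num_merged_labels is incremented.  Non-local and FORCED_LABEL
   labels are never merged this way.  */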
2587 
2588 
2589 /* Return true if T should end a basic block.  */
2590 
2591 bool
2592 stmt_ends_bb_p (tree t)
2593 {
2594   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2595 }
2596 
2597 
2598 /* Add gotos that used to be represented implicitly in the CFG.  */
2599 
2600 void
2601 disband_implicit_edges (void)
2602 {
2603   basic_block bb;
2604   block_stmt_iterator last;
2605   edge e;
2606   edge_iterator ei;
2607   tree stmt, label;
2608 
2609   FOR_EACH_BB (bb)
2610     {
2611       last = bsi_last (bb);
2612       stmt = last_stmt (bb);
2613 
2614       if (stmt && TREE_CODE (stmt) == COND_EXPR)
2615 	{
2616 	  /* Remove superfluous gotos from COND_EXPR branches.  Moved
2617 	     from cfg_remove_useless_stmts here since it violates the
2618 	     invariants for tree--cfg correspondence and thus fits better
2619 	     here where we do it anyway.  */
2620 	  e = find_edge (bb, bb->next_bb);
2621 	  if (e)
2622 	    {
2623 	      if (e->flags & EDGE_TRUE_VALUE)
2624 		COND_EXPR_THEN (stmt) = build_empty_stmt ();
2625 	      else if (e->flags & EDGE_FALSE_VALUE)
2626 		COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2627 	      else
2628 		gcc_unreachable ();
2629 	      e->flags |= EDGE_FALLTHRU;
2630 	    }
2631 
2632 	  continue;
2633 	}
2634 
2635       if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2636 	{
2637 	  /* Remove the RETURN_EXPR if we may fall through to the exit
2638 	     instead.  */
2639 	  gcc_assert (single_succ_p (bb));
2640 	  gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2641 
2642 	  if (bb->next_bb == EXIT_BLOCK_PTR
2643 	      && !TREE_OPERAND (stmt, 0))
2644 	    {
2645 	      bsi_remove (&last, true);
2646 	      single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2647 	    }
2648 	  continue;
2649 	}
2650 
2651       /* There can be no fallthru edge if the last statement is a control
2652 	 one.  */
2653       if (stmt && is_ctrl_stmt (stmt))
2654 	continue;
2655 
2656       /* Find a fallthru edge and emit the goto if necessary.  */
2657       FOR_EACH_EDGE (e, ei, bb->succs)
2658 	if (e->flags & EDGE_FALLTHRU)
2659 	  break;
2660 
2661       if (!e || e->dest == bb->next_bb)
2662 	continue;
2663 
2664       gcc_assert (e->dest != EXIT_BLOCK_PTR);
2665       label = tree_block_label (e->dest);
2666 
2667       stmt = build1 (GOTO_EXPR, void_type_node, label);
2668 #ifdef USE_MAPPED_LOCATION
2669       SET_EXPR_LOCATION (stmt, e->goto_locus);
2670 #else
2671       SET_EXPR_LOCUS (stmt, e->goto_locus);
2672 #endif
2673       bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2674       e->flags &= ~EDGE_FALLTHRU;
2675     }
2676 }
2677 
2678 /* Remove block annotations and other datastructures.  */
2679 
2680 void
2681 delete_tree_cfg_annotations (void)
2682 {
2683   label_to_block_map = NULL;
2684 }
2685 
2686 
2687 /* Return the first statement in basic block BB.  */
2688 
2689 tree
2690 first_stmt (basic_block bb)
2691 {
2692   block_stmt_iterator i = bsi_start (bb);
2693   return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2694 }
2695 
2696 
2697 /* Return the last statement in basic block BB.  */
2698 
2699 tree
2700 last_stmt (basic_block bb)
2701 {
2702   block_stmt_iterator b = bsi_last (bb);
2703   return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2704 }
2705 
2706 
2707 /* Return a pointer to the last statement in block BB.  */
2708 
2709 tree *
2710 last_stmt_ptr (basic_block bb)
2711 {
2712   block_stmt_iterator last = bsi_last (bb);
2713   return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2714 }
2715 
2716 
2717 /* Return the last statement of an otherwise empty block.  Return NULL
2718    if the block is totally empty, or if it contains more than one
2719    statement.  */
2720 
2721 tree
2722 last_and_only_stmt (basic_block bb)
2723 {
2724   block_stmt_iterator i = bsi_last (bb);
2725   tree last, prev;
2726 
2727   if (bsi_end_p (i))
2728     return NULL_TREE;
2729 
2730   last = bsi_stmt (i);
2731   bsi_prev (&i);
2732   if (bsi_end_p (i))
2733     return last;
2734 
2735   /* Empty statements should no longer appear in the instruction stream.
2736      Everything that might have appeared before should be deleted by
2737      remove_useless_stmts, and the optimizers should just bsi_remove
2738      instead of smashing with build_empty_stmt.
2739 
2740      Thus the only thing that should appear here in a block containing
2741      one executable statement is a label.  */
2742   prev = bsi_stmt (i);
2743   if (TREE_CODE (prev) == LABEL_EXPR)
2744     return last;
2745   else
2746     return NULL_TREE;
2747 }
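
/* Illustrative sketch, not part of the original file: the helpers above make
   it easy to recognize a block whose only real statement is a simple goto.
   The helper name is an assumption made only for this example.  */

static bool
example_block_is_bare_goto_p (basic_block bb)
{
  tree stmt = last_and_only_stmt (bb);

  return stmt != NULL_TREE && simple_goto_p (stmt);
}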
2748 
2749 
2750 /* Mark BB as the basic block holding statement T.  */
2751 
2752 void
2753 set_bb_for_stmt (tree t, basic_block bb)
2754 {
2755   if (TREE_CODE (t) == PHI_NODE)
2756     PHI_BB (t) = bb;
2757   else if (TREE_CODE (t) == STATEMENT_LIST)
2758     {
2759       tree_stmt_iterator i;
2760       for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2761 	set_bb_for_stmt (tsi_stmt (i), bb);
2762     }
2763   else
2764     {
2765       stmt_ann_t ann = get_stmt_ann (t);
2766       ann->bb = bb;
2767 
2768       /* If the statement is a label, add the label to the block-to-labels
2769 	 map so that we can speed up edge creation for GOTO_EXPRs.  */
2770       if (TREE_CODE (t) == LABEL_EXPR)
2771 	{
2772 	  int uid;
2773 
2774 	  t = LABEL_EXPR_LABEL (t);
2775 	  uid = LABEL_DECL_UID (t);
2776 	  if (uid == -1)
2777 	    {
2778 	      unsigned old_len = VEC_length (basic_block, label_to_block_map);
2779 	      LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2780 	      if (old_len <= (unsigned) uid)
2781 		{
2782 		  basic_block *addr;
2783 		  unsigned new_len = 3 * uid / 2;
2784 
2785 		  VEC_safe_grow (basic_block, gc, label_to_block_map,
2786 				 new_len);
2787 		  addr = VEC_address (basic_block, label_to_block_map);
2788 		  memset (&addr[old_len],
2789 			  0, sizeof (basic_block) * (new_len - old_len));
2790 		}
2791 	    }
2792 	  else
2793 	    /* We're moving an existing label.  Make sure that we've
2794 		removed it from the old block.  */
2795 	    gcc_assert (!bb
2796 			|| !VEC_index (basic_block, label_to_block_map, uid));
2797 	  VEC_replace (basic_block, label_to_block_map, uid, bb);
2798 	}
2799     }
2800 }
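
/* Illustrative sketch, not part of the original file: the label-to-block map
   maintained above is what makes resolving a goto destination cheap.  The
   helper name is an assumption made only for this example.  */

static basic_block
example_goto_target_bb (tree goto_stmt)
{
  /* For a simple goto the destination is a LABEL_DECL, and label_to_block
     reads the mapping recorded by set_bb_for_stmt.  */
  if (simple_goto_p (goto_stmt))
    return label_to_block (GOTO_DESTINATION (goto_stmt));

  return NULL;
}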
2801 
2802 /* Faster version of set_bb_for_stmt that assumes the statement is being
2803    moved from one basic block to another.
2804    BB splitting can run into quadratic behavior, so performance matters
2805    here; since the tables are already known to be big enough,
2806    change_bb_for_stmt can be inlined as a leaf function.  */
2807 static inline void
2808 change_bb_for_stmt (tree t, basic_block bb)
2809 {
2810   get_stmt_ann (t)->bb = bb;
2811   if (TREE_CODE (t) == LABEL_EXPR)
2812     VEC_replace (basic_block, label_to_block_map,
2813 		 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2814 }
2815 
2816 /* Find the block statement iterator for STMT.  */
2817 
2818 extern block_stmt_iterator
2819 bsi_for_stmt (tree stmt)
2820 {
2821   block_stmt_iterator bsi;
2822 
2823   for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2824     if (bsi_stmt (bsi) == stmt)
2825       return bsi;
2826 
2827   gcc_unreachable ();
2828 }
2829 
2830 /* Update statement T, or each statement in a STATEMENT_LIST T, if it has been marked modified.  */
2831 static inline void
2832 update_modified_stmts (tree t)
2833 {
2834   if (TREE_CODE (t) == STATEMENT_LIST)
2835     {
2836       tree_stmt_iterator i;
2837       tree stmt;
2838       for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2839         {
2840 	  stmt = tsi_stmt (i);
2841 	  update_stmt_if_modified (stmt);
2842 	}
2843     }
2844   else
2845     update_stmt_if_modified (t);
2846 }
2847 
2848 /* Insert statement (or statement list) T before the statement
2849    pointed-to by iterator I.  M specifies how to update iterator I
2850    after insertion (see enum bsi_iterator_update).  */
2851 
2852 void
2853 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2854 {
2855   set_bb_for_stmt (t, i->bb);
2856   update_modified_stmts (t);
2857   tsi_link_before (&i->tsi, t, m);
2858 }
2859 
2860 
2861 /* Insert statement (or statement list) T after the statement
2862    pointed-to by iterator I.  M specifies how to update iterator I
2863    after insertion (see enum bsi_iterator_update).  */
2864 
2865 void
2866 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2867 {
2868   set_bb_for_stmt (t, i->bb);
2869   update_modified_stmts (t);
2870   tsi_link_after (&i->tsi, t, m);
2871 }
2872 
2873 
2874 /* Remove the statement pointed to by iterator I.  The iterator is updated
2875    to the next statement.
2876 
2877    When REMOVE_EH_INFO is true we remove the statement pointed to by
2878    iterator I from the EH tables.  Otherwise we do not modify the EH
2879    tables.
2880 
2881    Generally, REMOVE_EH_INFO should be true when the statement is going to
2882    be removed from the IL and not reinserted elsewhere.  */
2883 
2884 void
2885 bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
2886 {
2887   tree t = bsi_stmt (*i);
2888   set_bb_for_stmt (t, NULL);
2889   delink_stmt_imm_use (t);
2890   tsi_delink (&i->tsi);
2891   mark_stmt_modified (t);
2892   if (remove_eh_info)
2893     remove_stmt_from_eh_region (t);
2894 }
2895 
2896 
2897 /* Move the statement at FROM so it comes right after the statement at TO.  */
2898 
2899 void
2900 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2901 {
2902   tree stmt = bsi_stmt (*from);
2903   bsi_remove (from, false);
2904   bsi_insert_after (to, stmt, BSI_SAME_STMT);
2905 }
2906 
2907 
2908 /* Move the statement at FROM so it comes right before the statement at TO.  */
2909 
2910 void
2911 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2912 {
2913   tree stmt = bsi_stmt (*from);
2914   bsi_remove (from, false);
2915   bsi_insert_before (to, stmt, BSI_SAME_STMT);
2916 }
2917 
2918 
2919 /* Move the statement at FROM to the end of basic block BB.  */
2920 
2921 void
2922 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2923 {
2924   block_stmt_iterator last = bsi_last (bb);
2925 
2926   /* Have to check bsi_end_p because it could be an empty block.  */
2927   if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2928     bsi_move_before (from, &last);
2929   else
2930     bsi_move_after (from, &last);
2931 }
2932 
2933 
2934 /* Replace the contents of the statement pointed to by iterator BSI
2935    with STMT.  If UPDATE_EH_INFO is true, the exception handling
2936    information of the original statement is moved to the new statement.  */
2937 
2938 void
2939 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
2940 {
2941   int eh_region;
2942   tree orig_stmt = bsi_stmt (*bsi);
2943 
2944   SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2945   set_bb_for_stmt (stmt, bsi->bb);
2946 
2947   /* Preserve EH region information from the original statement, if
2948      requested by the caller.  */
2949   if (update_eh_info)
2950     {
2951       eh_region = lookup_stmt_eh_region (orig_stmt);
2952       if (eh_region >= 0)
2953 	{
2954 	  remove_stmt_from_eh_region (orig_stmt);
2955 	  add_stmt_to_eh_region (stmt, eh_region);
2956 	}
2957     }
2958 
2959   delink_stmt_imm_use (orig_stmt);
2960   *bsi_stmt_ptr (*bsi) = stmt;
2961   mark_stmt_modified (stmt);
2962   update_modified_stmts (stmt);
2963 }
2964 
2965 
2966 /* Insert the statement pointed-to by BSI into edge E.  Every attempt
2967    is made to place the statement in an existing basic block, but
2968    sometimes that isn't possible.  When it isn't possible, the edge is
2969    split and the statement is added to the new block.
2970 
2971    In all cases, the returned *BSI points to the correct location.  The
2972    return value is true if insertion should be done after the location,
2973    or false if it should be done before the location.  If a new basic block
2974    has to be created, it is stored in *NEW_BB.  */
2975 
2976 static bool
2977 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2978 			   basic_block *new_bb)
2979 {
2980   basic_block dest, src;
2981   tree tmp;
2982 
2983   dest = e->dest;
2984  restart:
2985 
2986   /* If the destination has one predecessor which has no PHI nodes,
2987      insert there.  Except for the exit block.
2988 
2989      The requirement for no PHI nodes could be relaxed.  Basically we
2990      would have to examine the PHIs to prove that none of them used
2991      the value set by the statement we want to insert on E.  That
2992      hardly seems worth the effort.  */
2993   if (single_pred_p (dest)
2994       && ! phi_nodes (dest)
2995       && dest != EXIT_BLOCK_PTR)
2996     {
2997       *bsi = bsi_start (dest);
2998       if (bsi_end_p (*bsi))
2999 	return true;
3000 
3001       /* Make sure we insert after any leading labels.  */
3002       tmp = bsi_stmt (*bsi);
3003       while (TREE_CODE (tmp) == LABEL_EXPR)
3004 	{
3005 	  bsi_next (bsi);
3006 	  if (bsi_end_p (*bsi))
3007 	    break;
3008 	  tmp = bsi_stmt (*bsi);
3009 	}
3010 
3011       if (bsi_end_p (*bsi))
3012 	{
3013 	  *bsi = bsi_last (dest);
3014 	  return true;
3015 	}
3016       else
3017 	return false;
3018     }
3019 
3020   /* If the source has one successor, the edge is not abnormal and
3021      the last statement does not end a basic block, insert there.
3022      Except for the entry block.  */
3023   src = e->src;
3024   if ((e->flags & EDGE_ABNORMAL) == 0
3025       && single_succ_p (src)
3026       && src != ENTRY_BLOCK_PTR)
3027     {
3028       *bsi = bsi_last (src);
3029       if (bsi_end_p (*bsi))
3030 	return true;
3031 
3032       tmp = bsi_stmt (*bsi);
3033       if (!stmt_ends_bb_p (tmp))
3034 	return true;
3035 
3036       /* Insert code just before returning the value.  We may need to decompose
3037          the return in case it contains a non-trivial operand.  */
3038       if (TREE_CODE (tmp) == RETURN_EXPR)
3039         {
3040 	  tree op = TREE_OPERAND (tmp, 0);
3041 	  if (op && !is_gimple_val (op))
3042 	    {
3043 	      gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
3044 	      bsi_insert_before (bsi, op, BSI_NEW_STMT);
3045 	      TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
3046 	    }
3047 	  bsi_prev (bsi);
3048 	  return true;
3049         }
3050     }
3051 
3052   /* Otherwise, create a new basic block, and split this edge.  */
3053   dest = split_edge (e);
3054   if (new_bb)
3055     *new_bb = dest;
3056   e = single_pred_edge (dest);
3057   goto restart;
3058 }
3059 
3060 
3061 /* This routine will commit all pending edge insertions, creating any new
3062    basic blocks which are necessary.  */
3063 
3064 void
3065 bsi_commit_edge_inserts (void)
3066 {
3067   basic_block bb;
3068   edge e;
3069   edge_iterator ei;
3070 
3071   bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
3072 
3073   FOR_EACH_BB (bb)
3074     FOR_EACH_EDGE (e, ei, bb->succs)
3075       bsi_commit_one_edge_insert (e, NULL);
3076 }
3077 
3078 
3079 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3080    to this block, otherwise set it to NULL.  */
3081 
3082 void
3083 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3084 {
3085   if (new_bb)
3086     *new_bb = NULL;
3087   if (PENDING_STMT (e))
3088     {
3089       block_stmt_iterator bsi;
3090       tree stmt = PENDING_STMT (e);
3091 
3092       PENDING_STMT (e) = NULL_TREE;
3093 
3094       if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3095 	bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3096       else
3097 	bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3098     }
3099 }
3100 
3101 
3102 /* Add STMT to the pending list of edge E.  No actual insertion is
3103    made until a call to bsi_commit_edge_inserts () is made.  */
3104 
3105 void
3106 bsi_insert_on_edge (edge e, tree stmt)
3107 {
3108   append_to_statement_list (stmt, &PENDING_STMT (e));
3109 }
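
/* Illustrative sketch, not part of the original file: the deferred-insertion
   pattern.  A statement queued with bsi_insert_on_edge is not in the IL
   until a commit, which may split the edge and create a new block.  The
   helper name is an assumption made only for this example.  */

static void
example_queue_and_commit (edge e, tree stmt)
{
  basic_block new_bb = NULL;

  bsi_insert_on_edge (e, stmt);

  /* Materialize the pending statement; NEW_BB is set if E had to be
     split.  */
  bsi_commit_one_edge_insert (e, &new_bb);
}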
3110 
3111 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If a new
3112    block has to be created, it is returned.  */
3113 
3114 basic_block
3115 bsi_insert_on_edge_immediate (edge e, tree stmt)
3116 {
3117   block_stmt_iterator bsi;
3118   basic_block new_bb = NULL;
3119 
3120   gcc_assert (!PENDING_STMT (e));
3121 
3122   if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3123     bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3124   else
3125     bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3126 
3127   return new_bb;
3128 }
3129 
3130 /*---------------------------------------------------------------------------
3131 	     Tree specific functions for CFG manipulation
3132 ---------------------------------------------------------------------------*/
3133 
3134 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
3135 
3136 static void
3137 reinstall_phi_args (edge new_edge, edge old_edge)
3138 {
3139   tree var, phi;
3140 
3141   if (!PENDING_STMT (old_edge))
3142     return;
3143 
3144   for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3145        var && phi;
3146        var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3147     {
3148       tree result = TREE_PURPOSE (var);
3149       tree arg = TREE_VALUE (var);
3150 
3151       gcc_assert (result == PHI_RESULT (phi));
3152 
3153       add_phi_arg (phi, arg, new_edge);
3154     }
3155 
3156   PENDING_STMT (old_edge) = NULL;
3157 }
3158 
3159 /* Returns the basic block after which the new basic block created
3160    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
3161    near its "logical" location.  This is of most help to humans looking
3162    at debugging dumps.  */
3163 
3164 static basic_block
3165 split_edge_bb_loc (edge edge_in)
3166 {
3167   basic_block dest = edge_in->dest;
3168 
3169   if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3170     return edge_in->src;
3171   else
3172     return dest->prev_bb;
3173 }
3174 
3175 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
3176    Abort on abnormal edges.  */
3177 
3178 static basic_block
3179 tree_split_edge (edge edge_in)
3180 {
3181   basic_block new_bb, after_bb, dest;
3182   edge new_edge, e;
3183 
3184   /* Abnormal edges cannot be split.  */
3185   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3186 
3187   dest = edge_in->dest;
3188 
3189   after_bb = split_edge_bb_loc (edge_in);
3190 
3191   new_bb = create_empty_bb (after_bb);
3192   new_bb->frequency = EDGE_FREQUENCY (edge_in);
3193   new_bb->count = edge_in->count;
3194   new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3195   new_edge->probability = REG_BR_PROB_BASE;
3196   new_edge->count = edge_in->count;
3197 
3198   e = redirect_edge_and_branch (edge_in, new_bb);
3199   gcc_assert (e);
3200   reinstall_phi_args (new_edge, e);
3201 
3202   return new_bb;
3203 }
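
/* Illustrative sketch, not part of the original file: passes normally go
   through the generic split_edge () hook (used above in
   tree_find_edge_insert_loc), which ends up in tree_split_edge for trees.
   Abnormal edges must be filtered out first, since tree_split_edge asserts
   on them.  The helper name is an assumption made only for this example.  */

static basic_block
example_split_normal_edge (edge e)
{
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  return split_edge (e);
}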
3204 
3205 
3206 /* Return true when BB has label LABEL in it.  */
3207 
3208 static bool
3209 has_label_p (basic_block bb, tree label)
3210 {
3211   block_stmt_iterator bsi;
3212 
3213   for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3214     {
3215       tree stmt = bsi_stmt (bsi);
3216 
3217       if (TREE_CODE (stmt) != LABEL_EXPR)
3218 	return false;
3219       if (LABEL_EXPR_LABEL (stmt) == label)
3220 	return true;
3221     }
3222   return false;
3223 }
3224 
3225 
3226 /* Callback for walk_tree: check that all elements with their address taken
3227    are properly noticed as such.  DATA is nonnull if TP was seen inside a
3228    PHI node.  */
3229 
3230 static tree
3231 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3232 {
3233   tree t = *tp, x;
3234   bool in_phi = (data != NULL);
3235 
3236   if (TYPE_P (t))
3237     *walk_subtrees = 0;
3238 
3239   /* Check operand N for being valid GIMPLE and give error MSG if not.  */
3240 #define CHECK_OP(N, MSG) \
3241   do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
3242        { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3243 
3244   switch (TREE_CODE (t))
3245     {
3246     case SSA_NAME:
3247       if (SSA_NAME_IN_FREE_LIST (t))
3248 	{
3249 	  error ("SSA name in freelist but still referenced");
3250 	  return *tp;
3251 	}
3252       break;
3253 
3254     case ASSERT_EXPR:
3255       x = fold (ASSERT_EXPR_COND (t));
3256       if (x == boolean_false_node)
3257 	{
3258 	  error ("ASSERT_EXPR with an always-false condition");
3259 	  return *tp;
3260 	}
3261       break;
3262 
3263     case MODIFY_EXPR:
3264       x = TREE_OPERAND (t, 0);
3265       if (TREE_CODE (x) == BIT_FIELD_REF
3266 	  && is_gimple_reg (TREE_OPERAND (x, 0)))
3267 	{
3268 	  error ("GIMPLE register modified with BIT_FIELD_REF");
3269 	  return t;
3270 	}
3271       break;
3272 
3273     case ADDR_EXPR:
3274       {
3275 	bool old_invariant;
3276 	bool old_constant;
3277 	bool old_side_effects;
3278 	bool new_invariant;
3279 	bool new_constant;
3280 	bool new_side_effects;
3281 
3282         /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3283 	   dead PHIs that take the address of something.  But if the PHI
3284 	   result is dead, the fact that it takes the address of anything
3285 	   is irrelevant.  Because we can not tell from here if a PHI result
3286 	   is dead, we just skip this check for PHIs altogether.  This means
3287 	   we may be missing "valid" checks, but what can you do?
3288 	   This was PR19217.  */
3289         if (in_phi)
3290 	  break;
3291 
3292 	old_invariant = TREE_INVARIANT (t);
3293 	old_constant = TREE_CONSTANT (t);
3294 	old_side_effects = TREE_SIDE_EFFECTS (t);
3295 
3296 	recompute_tree_invariant_for_addr_expr (t);
3297 	new_invariant = TREE_INVARIANT (t);
3298 	new_side_effects = TREE_SIDE_EFFECTS (t);
3299 	new_constant = TREE_CONSTANT (t);
3300 
3301 	if (old_invariant != new_invariant)
3302 	  {
3303 	    error ("invariant not recomputed when ADDR_EXPR changed");
3304 	    return t;
3305 	  }
3306 
3307         if (old_constant != new_constant)
3308 	  {
3309 	    error ("constant not recomputed when ADDR_EXPR changed");
3310 	    return t;
3311 	  }
3312 	if (old_side_effects != new_side_effects)
3313 	  {
3314 	    error ("side effects not recomputed when ADDR_EXPR changed");
3315 	    return t;
3316 	  }
3317 
3318 	/* Skip any references (they will be checked when we recurse down the
3319 	   tree) and ensure that any variable used as a prefix is marked
3320 	   addressable.  */
3321 	for (x = TREE_OPERAND (t, 0);
3322 	     handled_component_p (x);
3323 	     x = TREE_OPERAND (x, 0))
3324 	  ;
3325 
3326 	if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3327 	  return NULL;
3328 	if (!TREE_ADDRESSABLE (x))
3329 	  {
3330 	    error ("address taken, but ADDRESSABLE bit not set");
3331 	    return x;
3332 	  }
3333 	break;
3334       }
3335 
3336     case COND_EXPR:
3337       x = COND_EXPR_COND (t);
3338       if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3339 	{
3340 	  error ("non-boolean used in condition");
3341 	  return x;
3342 	}
3343       if (!is_gimple_condexpr (x))
3344         {
3345 	  error ("invalid conditional operand");
3346 	  return x;
3347 	}
3348       break;
3349 
3350     case NOP_EXPR:
3351     case CONVERT_EXPR:
3352     case FIX_TRUNC_EXPR:
3353     case FIX_CEIL_EXPR:
3354     case FIX_FLOOR_EXPR:
3355     case FIX_ROUND_EXPR:
3356     case FLOAT_EXPR:
3357     case NEGATE_EXPR:
3358     case ABS_EXPR:
3359     case BIT_NOT_EXPR:
3360     case NON_LVALUE_EXPR:
3361     case TRUTH_NOT_EXPR:
3362       CHECK_OP (0, "invalid operand to unary operator");
3363       break;
3364 
3365     case REALPART_EXPR:
3366     case IMAGPART_EXPR:
3367     case COMPONENT_REF:
3368     case ARRAY_REF:
3369     case ARRAY_RANGE_REF:
3370     case BIT_FIELD_REF:
3371     case VIEW_CONVERT_EXPR:
3372       /* We have a nest of references.  Verify that each of the operands
3373 	 that determine where to reference is either a constant or a variable,
3374 	 verify that the base is valid, and then show we've already checked
3375 	 the subtrees.  */
3376       while (handled_component_p (t))
3377 	{
3378 	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3379 	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3380 	  else if (TREE_CODE (t) == ARRAY_REF
3381 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
3382 	    {
3383 	      CHECK_OP (1, "invalid array index");
3384 	      if (TREE_OPERAND (t, 2))
3385 		CHECK_OP (2, "invalid array lower bound");
3386 	      if (TREE_OPERAND (t, 3))
3387 		CHECK_OP (3, "invalid array stride");
3388 	    }
3389 	  else if (TREE_CODE (t) == BIT_FIELD_REF)
3390 	    {
3391 	      CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3392 	      CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3393 	    }
3394 
3395 	  t = TREE_OPERAND (t, 0);
3396 	}
3397 
3398       if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3399 	{
3400 	  error ("invalid reference prefix");
3401 	  return t;
3402 	}
3403       *walk_subtrees = 0;
3404       break;
3405 
3406     case LT_EXPR:
3407     case LE_EXPR:
3408     case GT_EXPR:
3409     case GE_EXPR:
3410     case EQ_EXPR:
3411     case NE_EXPR:
3412     case UNORDERED_EXPR:
3413     case ORDERED_EXPR:
3414     case UNLT_EXPR:
3415     case UNLE_EXPR:
3416     case UNGT_EXPR:
3417     case UNGE_EXPR:
3418     case UNEQ_EXPR:
3419     case LTGT_EXPR:
3420     case PLUS_EXPR:
3421     case MINUS_EXPR:
3422     case MULT_EXPR:
3423     case TRUNC_DIV_EXPR:
3424     case CEIL_DIV_EXPR:
3425     case FLOOR_DIV_EXPR:
3426     case ROUND_DIV_EXPR:
3427     case TRUNC_MOD_EXPR:
3428     case CEIL_MOD_EXPR:
3429     case FLOOR_MOD_EXPR:
3430     case ROUND_MOD_EXPR:
3431     case RDIV_EXPR:
3432     case EXACT_DIV_EXPR:
3433     case MIN_EXPR:
3434     case MAX_EXPR:
3435     case LSHIFT_EXPR:
3436     case RSHIFT_EXPR:
3437     case LROTATE_EXPR:
3438     case RROTATE_EXPR:
3439     case BIT_IOR_EXPR:
3440     case BIT_XOR_EXPR:
3441     case BIT_AND_EXPR:
3442       CHECK_OP (0, "invalid operand to binary operator");
3443       CHECK_OP (1, "invalid operand to binary operator");
3444       break;
3445 
3446     case CONSTRUCTOR:
3447       if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3448 	*walk_subtrees = 0;
3449       break;
3450 
3451     default:
3452       break;
3453     }
3454   return NULL;
3455 
3456 #undef CHECK_OP
3457 }
3458 
3459 
3460 /* Verify STMT, return true if STMT is not in GIMPLE form.
3461    TODO: Implement type checking.  */
3462 
3463 static bool
3464 verify_stmt (tree stmt, bool last_in_block)
3465 {
3466   tree addr;
3467 
3468   if (OMP_DIRECTIVE_P (stmt))
3469     {
3470       /* OpenMP directives are validated by the FE and never operated
3471 	 on by the optimizers.  Furthermore, OMP_FOR may contain
3472 	 non-gimple expressions when the main index variable has had
3473 	 its address taken.  This does not affect the loop itself
3474 	 because the header of an OMP_FOR is merely used to determine
3475 	 how to set up the parallel iteration.  */
3476       return false;
3477     }
3478 
3479   if (!is_gimple_stmt (stmt))
3480     {
3481       error ("is not a valid GIMPLE statement");
3482       goto fail;
3483     }
3484 
3485   addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3486   if (addr)
3487     {
3488       debug_generic_stmt (addr);
3489       return true;
3490     }
3491 
3492   /* If the statement is marked as part of an EH region, then it is
3493      expected that the statement could throw.  Verify that when
3494      optimizations simplify a statement to the point where we can
3495      prove it cannot throw, the other data structures are updated
3496      to match.  */
3497   if (lookup_stmt_eh_region (stmt) >= 0)
3498     {
3499       if (!tree_could_throw_p (stmt))
3500 	{
3501 	  error ("statement marked for throw, but doesn%'t");
3502 	  goto fail;
3503 	}
3504       if (!last_in_block && tree_can_throw_internal (stmt))
3505 	{
3506 	  error ("statement marked for throw in middle of block");
3507 	  goto fail;
3508 	}
3509     }
3510 
3511   return false;
3512 
3513  fail:
3514   debug_generic_stmt (stmt);
3515   return true;
3516 }
3517 
3518 
3519 /* Return true when T can be shared.  */
3520 
3521 static bool
3522 tree_node_can_be_shared (tree t)
3523 {
3524   if (IS_TYPE_OR_DECL_P (t)
3525       || is_gimple_min_invariant (t)
3526       || TREE_CODE (t) == SSA_NAME
3527       || t == error_mark_node
3528       || TREE_CODE (t) == IDENTIFIER_NODE)
3529     return true;
3530 
3531   if (TREE_CODE (t) == CASE_LABEL_EXPR)
3532     return true;
3533 
3534   while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3535 	   && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3536 	 || TREE_CODE (t) == COMPONENT_REF
3537 	 || TREE_CODE (t) == REALPART_EXPR
3538 	 || TREE_CODE (t) == IMAGPART_EXPR)
3539     t = TREE_OPERAND (t, 0);
3540 
3541   if (DECL_P (t))
3542     return true;
3543 
3544   return false;
3545 }
3546 
3547 
3548 /* Called via walk_tree.  Verify tree sharing.  */
3549 
3550 static tree
3551 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3552 {
3553   htab_t htab = (htab_t) data;
3554   void **slot;
3555 
3556   if (tree_node_can_be_shared (*tp))
3557     {
3558       *walk_subtrees = false;
3559       return NULL;
3560     }
3561 
3562   slot = htab_find_slot (htab, *tp, INSERT);
3563   if (*slot)
3564     return (tree) *slot;
3565   *slot = *tp;
3566 
3567   return NULL;
3568 }
3569 
3570 
3571 /* Verify the GIMPLE statement chain.  */
3572 
3573 void
3574 verify_stmts (void)
3575 {
3576   basic_block bb;
3577   block_stmt_iterator bsi;
3578   bool err = false;
3579   htab_t htab;
3580   tree addr;
3581 
3582   timevar_push (TV_TREE_STMT_VERIFY);
3583   htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3584 
3585   FOR_EACH_BB (bb)
3586     {
3587       tree phi;
3588       int i;
3589 
3590       for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3591 	{
3592 	  int phi_num_args = PHI_NUM_ARGS (phi);
3593 
3594 	  if (bb_for_stmt (phi) != bb)
3595 	    {
3596 	      error ("bb_for_stmt (phi) is set to a wrong basic block");
3597 	      err |= true;
3598 	    }
3599 
3600 	  for (i = 0; i < phi_num_args; i++)
3601 	    {
3602 	      tree t = PHI_ARG_DEF (phi, i);
3603 	      tree addr;
3604 
3605 	      /* Addressable variables do have SSA_NAMEs but they
3606 		 are not considered gimple values.  */
3607 	      if (TREE_CODE (t) != SSA_NAME
3608 		  && TREE_CODE (t) != FUNCTION_DECL
3609 		  && !is_gimple_val (t))
3610 		{
3611 		  error ("PHI def is not a GIMPLE value");
3612 		  debug_generic_stmt (phi);
3613 		  debug_generic_stmt (t);
3614 		  err |= true;
3615 		}
3616 
3617 	      addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3618 	      if (addr)
3619 		{
3620 		  debug_generic_stmt (addr);
3621 		  err |= true;
3622 		}
3623 
3624 	      addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3625 	      if (addr)
3626 		{
3627 		  error ("incorrect sharing of tree nodes");
3628 		  debug_generic_stmt (phi);
3629 		  debug_generic_stmt (addr);
3630 		  err |= true;
3631 		}
3632 	    }
3633 	}
3634 
3635       for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3636 	{
3637 	  tree stmt = bsi_stmt (bsi);
3638 
3639 	  if (bb_for_stmt (stmt) != bb)
3640 	    {
3641 	      error ("bb_for_stmt (stmt) is set to a wrong basic block");
3642 	      err |= true;
3643 	    }
3644 
3645 	  bsi_next (&bsi);
3646 	  err |= verify_stmt (stmt, bsi_end_p (bsi));
3647 	  addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3648 	  if (addr)
3649 	    {
3650 	      error ("incorrect sharing of tree nodes");
3651 	      debug_generic_stmt (stmt);
3652 	      debug_generic_stmt (addr);
3653 	      err |= true;
3654 	    }
3655 	}
3656     }
3657 
3658   if (err)
3659     internal_error ("verify_stmts failed");
3660 
3661   htab_delete (htab);
3662   timevar_pop (TV_TREE_STMT_VERIFY);
3663 }
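
/* A minimal checking sketch, assuming the ENABLE_CHECKING context in
   which these verifiers normally run (not compiled here): a pass can
   combine the statement-level and CFG-level checks like this.  */
#if 0
static void
verify_gimple_and_cfg (void)
{
  verify_stmts ();	/* GIMPLE form and tree sharing, as defined above.  */
  verify_flow_info ();	/* CFG hook that reaches tree_verify_flow_info.  */
}
#endif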
3664 
3665 
3666 /* Verifies that the flow information is OK.  */
3667 
3668 static int
3669 tree_verify_flow_info (void)
3670 {
3671   int err = 0;
3672   basic_block bb;
3673   block_stmt_iterator bsi;
3674   tree stmt;
3675   edge e;
3676   edge_iterator ei;
3677 
3678   if (ENTRY_BLOCK_PTR->stmt_list)
3679     {
3680       error ("ENTRY_BLOCK has a statement list associated with it");
3681       err = 1;
3682     }
3683 
3684   if (EXIT_BLOCK_PTR->stmt_list)
3685     {
3686       error ("EXIT_BLOCK has a statement list associated with it");
3687       err = 1;
3688     }
3689 
3690   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3691     if (e->flags & EDGE_FALLTHRU)
3692       {
3693 	error ("fallthru to exit from bb %d", e->src->index);
3694 	err = 1;
3695       }
3696 
3697   FOR_EACH_BB (bb)
3698     {
3699       bool found_ctrl_stmt = false;
3700 
3701       stmt = NULL_TREE;
3702 
3703       /* Skip labels on the start of basic block.  */
3704       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3705 	{
3706 	  tree prev_stmt = stmt;
3707 
3708 	  stmt = bsi_stmt (bsi);
3709 
3710 	  if (TREE_CODE (stmt) != LABEL_EXPR)
3711 	    break;
3712 
3713 	  if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3714 	    {
3715 	      error ("nonlocal label ");
3716 	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3717 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
3718 		       bb->index);
3719 	      err = 1;
3720 	    }
3721 
3722 	  if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3723 	    {
3724 	      error ("label ");
3725 	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3726 	      fprintf (stderr, " to block does not match in bb %d",
3727 		       bb->index);
3728 	      err = 1;
3729 	    }
3730 
3731 	  if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3732 	      != current_function_decl)
3733 	    {
3734 	      error ("label ");
3735 	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3736 	      fprintf (stderr, " has incorrect context in bb %d",
3737 		       bb->index);
3738 	      err = 1;
3739 	    }
3740 	}
3741 
3742       /* Verify that body of basic block BB is free of control flow.  */
3743       for (; !bsi_end_p (bsi); bsi_next (&bsi))
3744 	{
3745 	  tree stmt = bsi_stmt (bsi);
3746 
3747 	  if (found_ctrl_stmt)
3748 	    {
3749 	      error ("control flow in the middle of basic block %d",
3750 		     bb->index);
3751 	      err = 1;
3752 	    }
3753 
3754 	  if (stmt_ends_bb_p (stmt))
3755 	    found_ctrl_stmt = true;
3756 
3757 	  if (TREE_CODE (stmt) == LABEL_EXPR)
3758 	    {
3759 	      error ("label ");
3760 	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
3761 	      fprintf (stderr, " in the middle of basic block %d", bb->index);
3762 	      err = 1;
3763 	    }
3764 	}
3765 
3766       bsi = bsi_last (bb);
3767       if (bsi_end_p (bsi))
3768 	continue;
3769 
3770       stmt = bsi_stmt (bsi);
3771 
3772       err |= verify_eh_edges (stmt);
3773 
3774       if (is_ctrl_stmt (stmt))
3775 	{
3776 	  FOR_EACH_EDGE (e, ei, bb->succs)
3777 	    if (e->flags & EDGE_FALLTHRU)
3778 	      {
3779 		error ("fallthru edge after a control statement in bb %d",
3780 		       bb->index);
3781 		err = 1;
3782 	      }
3783 	}
3784 
3785       if (TREE_CODE (stmt) != COND_EXPR)
3786 	{
3787 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
3788 	     after anything other than a COND_EXPR.  */
3789 	  FOR_EACH_EDGE (e, ei, bb->succs)
3790 	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
3791 	      {
3792 		error ("true/false edge after a non-COND_EXPR in bb %d",
3793 		       bb->index);
3794 		err = 1;
3795 	      }
3796 	}
3797 
3798       switch (TREE_CODE (stmt))
3799 	{
3800 	case COND_EXPR:
3801 	  {
3802 	    edge true_edge;
3803 	    edge false_edge;
3804 	    if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3805 		|| TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3806 	      {
3807 		error ("structured COND_EXPR at the end of bb %d", bb->index);
3808 		err = 1;
3809 	      }
3810 
3811 	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3812 
3813 	    if (!true_edge || !false_edge
3814 		|| !(true_edge->flags & EDGE_TRUE_VALUE)
3815 		|| !(false_edge->flags & EDGE_FALSE_VALUE)
3816 		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3817 		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3818 		|| EDGE_COUNT (bb->succs) >= 3)
3819 	      {
3820 		error ("wrong outgoing edge flags at end of bb %d",
3821 		       bb->index);
3822 		err = 1;
3823 	      }
3824 
3825 	    if (!has_label_p (true_edge->dest,
3826 			      GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3827 	      {
3828 		error ("%<then%> label does not match edge at end of bb %d",
3829 		       bb->index);
3830 		err = 1;
3831 	      }
3832 
3833 	    if (!has_label_p (false_edge->dest,
3834 			      GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3835 	      {
3836 		error ("%<else%> label does not match edge at end of bb %d",
3837 		       bb->index);
3838 		err = 1;
3839 	      }
3840 	  }
3841 	  break;
3842 
3843 	case GOTO_EXPR:
3844 	  if (simple_goto_p (stmt))
3845 	    {
3846 	      error ("explicit goto at end of bb %d", bb->index);
3847 	      err = 1;
3848 	    }
3849 	  else
3850 	    {
3851 	      /* FIXME.  We should double check that the labels in the
3852 		 destination blocks have their address taken.  */
3853 	      FOR_EACH_EDGE (e, ei, bb->succs)
3854 		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3855 				 | EDGE_FALSE_VALUE))
3856 		    || !(e->flags & EDGE_ABNORMAL))
3857 		  {
3858 		    error ("wrong outgoing edge flags at end of bb %d",
3859 			   bb->index);
3860 		    err = 1;
3861 		  }
3862 	    }
3863 	  break;
3864 
3865 	case RETURN_EXPR:
3866 	  if (!single_succ_p (bb)
3867 	      || (single_succ_edge (bb)->flags
3868 		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
3869 		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3870 	    {
3871 	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
3872 	      err = 1;
3873 	    }
3874 	  if (single_succ (bb) != EXIT_BLOCK_PTR)
3875 	    {
3876 	      error ("return edge does not point to exit in bb %d",
3877 		     bb->index);
3878 	      err = 1;
3879 	    }
3880 	  break;
3881 
3882 	case SWITCH_EXPR:
3883 	  {
3884 	    tree prev;
3885 	    edge e;
3886 	    size_t i, n;
3887 	    tree vec;
3888 
3889 	    vec = SWITCH_LABELS (stmt);
3890 	    n = TREE_VEC_LENGTH (vec);
3891 
3892 	    /* Mark all the destination basic blocks.  */
3893 	    for (i = 0; i < n; ++i)
3894 	      {
3895 		tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3896 		basic_block label_bb = label_to_block (lab);
3897 
3898 		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3899 		label_bb->aux = (void *)1;
3900 	      }
3901 
3902 	    /* Verify that the case labels are sorted.  */
3903 	    prev = TREE_VEC_ELT (vec, 0);
3904 	    for (i = 1; i < n - 1; ++i)
3905 	      {
3906 		tree c = TREE_VEC_ELT (vec, i);
3907 		if (! CASE_LOW (c))
3908 		  {
3909 		    error ("found default case not at end of case vector");
3910 		    err = 1;
3911 		    continue;
3912 		  }
3913 		if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3914 		  {
3915 		    error ("case labels not sorted: ");
3916 		    print_generic_expr (stderr, prev, 0);
3917 		    fprintf (stderr," is greater than ");
3918 		    print_generic_expr (stderr, c, 0);
3919 		    fprintf (stderr," but comes before it.\n");
3920 		    err = 1;
3921 		  }
3922 		prev = c;
3923 	      }
3924 	    if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3925 	      {
3926 		error ("no default case found at end of case vector");
3927 		err = 1;
3928 	      }
3929 
3930 	    FOR_EACH_EDGE (e, ei, bb->succs)
3931 	      {
3932 		if (!e->dest->aux)
3933 		  {
3934 		    error ("extra outgoing edge %d->%d",
3935 			   bb->index, e->dest->index);
3936 		    err = 1;
3937 		  }
3938 		e->dest->aux = (void *)2;
3939 		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3940 				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3941 		  {
3942 		    error ("wrong outgoing edge flags at end of bb %d",
3943 			   bb->index);
3944 		    err = 1;
3945 		  }
3946 	      }
3947 
3948 	    /* Check that we have all of them.  */
3949 	    for (i = 0; i < n; ++i)
3950 	      {
3951 		tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3952 		basic_block label_bb = label_to_block (lab);
3953 
3954 		if (label_bb->aux != (void *)2)
3955 		  {
3956 		    error ("missing edge %i->%i",
3957 			   bb->index, label_bb->index);
3958 		    err = 1;
3959 		  }
3960 	      }
3961 
3962 	    FOR_EACH_EDGE (e, ei, bb->succs)
3963 	      e->dest->aux = (void *)0;
3964 	  }
3965 
3966 	default: ;
3967 	}
3968     }
3969 
3970   if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3971     verify_dominators (CDI_DOMINATORS);
3972 
3973   return err;
3974 }
3975 
3976 
3977 /* Updates phi nodes after creating a forwarder block joined
3978    by edge FALLTHRU.  */
3979 
3980 static void
3981 tree_make_forwarder_block (edge fallthru)
3982 {
3983   edge e;
3984   edge_iterator ei;
3985   basic_block dummy, bb;
3986   tree phi, new_phi, var;
3987 
3988   dummy = fallthru->src;
3989   bb = fallthru->dest;
3990 
3991   if (single_pred_p (bb))
3992     return;
3993 
3994   /* If we redirected a branch we must create new phi nodes at the
3995      start of BB.  */
3996   for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3997     {
3998       var = PHI_RESULT (phi);
3999       new_phi = create_phi_node (var, bb);
4000       SSA_NAME_DEF_STMT (var) = new_phi;
4001       SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4002       add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
4003     }
4004 
4005   /* Ensure that the PHI node chain is in the same order.  */
4006   set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
4007 
4008   /* Add the arguments we have stored on edges.  */
4009   FOR_EACH_EDGE (e, ei, bb->preds)
4010     {
4011       if (e == fallthru)
4012 	continue;
4013 
4014       flush_pending_stmts (e);
4015     }
4016 }
4017 
4018 
4019 /* Return a non-special label in the head of basic block BB.
4020    Create one if it doesn't exist.  */
4021 
4022 tree
4023 tree_block_label (basic_block bb)
4024 {
4025   block_stmt_iterator i, s = bsi_start (bb);
4026   bool first = true;
4027   tree label, stmt;
4028 
4029   for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4030     {
4031       stmt = bsi_stmt (i);
4032       if (TREE_CODE (stmt) != LABEL_EXPR)
4033 	break;
4034       label = LABEL_EXPR_LABEL (stmt);
4035       if (!DECL_NONLOCAL (label))
4036 	{
4037 	  if (!first)
4038 	    bsi_move_before (&i, &s);
4039 	  return label;
4040 	}
4041     }
4042 
4043   label = create_artificial_label ();
4044   stmt = build1 (LABEL_EXPR, void_type_node, label);
4045   bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4046   return label;
4047 }
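
/* Sketch (not compiled): a typical consumer of tree_block_label builds an
   explicit jump to a block, reusing the block's label when one exists.
   Assumes the GIMPLE building helpers already visible in this file.  */
#if 0
static tree
build_goto_to_block (basic_block bb)
{
  tree label = tree_block_label (bb);	/* Reuse or create the label.  */
  return build1 (GOTO_EXPR, void_type_node, label);
}
#endif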
4048 
4049 
4050 /* Attempt to perform edge redirection by replacing a possibly complex
4051    jump instruction by a goto or by removing the jump completely.
4052    This can apply only if all edges now point to the same block.  The
4053    parameters and return values are equivalent to
4054    redirect_edge_and_branch.  */
4055 
4056 static edge
4057 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4058 {
4059   basic_block src = e->src;
4060   block_stmt_iterator b;
4061   tree stmt;
4062 
4063   /* We can replace or remove a complex jump only when we have exactly
4064      two edges.  */
4065   if (EDGE_COUNT (src->succs) != 2
4066       /* Verify that all targets will be TARGET.  Specifically, the
4067 	 edge that is not E must also go to TARGET.  */
4068       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4069     return NULL;
4070 
4071   b = bsi_last (src);
4072   if (bsi_end_p (b))
4073     return NULL;
4074   stmt = bsi_stmt (b);
4075 
4076   if (TREE_CODE (stmt) == COND_EXPR
4077       || TREE_CODE (stmt) == SWITCH_EXPR)
4078     {
4079       bsi_remove (&b, true);
4080       e = ssa_redirect_edge (e, target);
4081       e->flags = EDGE_FALLTHRU;
4082       return e;
4083     }
4084 
4085   return NULL;
4086 }
4087 
4088 
4089 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
4090    edge representing the redirected branch.  */
4091 
4092 static edge
4093 tree_redirect_edge_and_branch (edge e, basic_block dest)
4094 {
4095   basic_block bb = e->src;
4096   block_stmt_iterator bsi;
4097   edge ret;
4098   tree label, stmt;
4099 
4100   if (e->flags & EDGE_ABNORMAL)
4101     return NULL;
4102 
4103   if (e->src != ENTRY_BLOCK_PTR
4104       && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4105     return ret;
4106 
4107   if (e->dest == dest)
4108     return NULL;
4109 
4110   label = tree_block_label (dest);
4111 
4112   bsi = bsi_last (bb);
4113   stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4114 
4115   switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4116     {
4117     case COND_EXPR:
4118       stmt = (e->flags & EDGE_TRUE_VALUE
4119 	      ? COND_EXPR_THEN (stmt)
4120 	      : COND_EXPR_ELSE (stmt));
4121       GOTO_DESTINATION (stmt) = label;
4122       break;
4123 
4124     case GOTO_EXPR:
4125       /* No non-abnormal edges should lead from a non-simple goto, and
4126 	 simple ones should be represented implicitly.  */
4127       gcc_unreachable ();
4128 
4129     case SWITCH_EXPR:
4130       {
4131         tree cases = get_cases_for_edge (e, stmt);
4132 
4133 	/* If we have a list of cases associated with E, then use it
4134 	   as it's a lot faster than walking the entire case vector.  */
4135 	if (cases)
4136 	  {
4137 	    edge e2 = find_edge (e->src, dest);
4138 	    tree last, first;
4139 
4140 	    first = cases;
4141 	    while (cases)
4142 	      {
4143 		last = cases;
4144 		CASE_LABEL (cases) = label;
4145 		cases = TREE_CHAIN (cases);
4146 	      }
4147 
4148 	    /* If there was already an edge in the CFG, then we need
4149 	       to move all the cases associated with E to E2.  */
4150 	    if (e2)
4151 	      {
4152 		tree cases2 = get_cases_for_edge (e2, stmt);
4153 
4154 		TREE_CHAIN (last) = TREE_CHAIN (cases2);
4155 		TREE_CHAIN (cases2) = first;
4156 	      }
4157 	  }
4158 	else
4159 	  {
4160 	    tree vec = SWITCH_LABELS (stmt);
4161 	    size_t i, n = TREE_VEC_LENGTH (vec);
4162 
4163 	    for (i = 0; i < n; i++)
4164 	      {
4165 		tree elt = TREE_VEC_ELT (vec, i);
4166 
4167 		if (label_to_block (CASE_LABEL (elt)) == e->dest)
4168 		  CASE_LABEL (elt) = label;
4169 	      }
4170 	  }
4171 
4172 	break;
4173       }
4174 
4175     case RETURN_EXPR:
4176       bsi_remove (&bsi, true);
4177       e->flags |= EDGE_FALLTHRU;
4178       break;
4179 
4180     default:
4181       /* Otherwise it must be a fallthru edge, and we don't need to
4182 	 do anything besides redirecting it.  */
4183       gcc_assert (e->flags & EDGE_FALLTHRU);
4184       break;
4185     }
4186 
4187   /* Update/insert PHI nodes as necessary.  */
4188 
4189   /* Now update the edges in the CFG.  */
4190   e = ssa_redirect_edge (e, dest);
4191 
4192   return e;
4193 }
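
/* Sketch (not compiled): passes normally reach the function above through
   the redirect_edge_and_branch CFG hook; E and NEW_DEST are hypothetical
   locals.  The pattern mirrors tree_duplicate_sese_region below.  */
#if 0
  edge redirected = redirect_edge_and_branch (e, new_dest);
  if (redirected)
    flush_pending_stmts (e);	/* Commit PHI args queued by ssa_redirect_edge.  */
#endif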
4194 
4195 
4196 /* Simple wrapper, as we can always redirect fallthru edges.  */
4197 
4198 static basic_block
4199 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4200 {
4201   e = tree_redirect_edge_and_branch (e, dest);
4202   gcc_assert (e);
4203 
4204   return NULL;
4205 }
4206 
4207 
4208 /* Splits basic block BB after statement STMT (but at least after the
4209    labels).  If STMT is NULL, BB is split just after the labels.  */
4210 
4211 static basic_block
4212 tree_split_block (basic_block bb, void *stmt)
4213 {
4214   block_stmt_iterator bsi;
4215   tree_stmt_iterator tsi_tgt;
4216   tree act;
4217   basic_block new_bb;
4218   edge e;
4219   edge_iterator ei;
4220 
4221   new_bb = create_empty_bb (bb);
4222 
4223   /* Redirect the outgoing edges.  */
4224   new_bb->succs = bb->succs;
4225   bb->succs = NULL;
4226   FOR_EACH_EDGE (e, ei, new_bb->succs)
4227     e->src = new_bb;
4228 
4229   if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4230     stmt = NULL;
4231 
4232   /* Move everything from BSI to the new basic block.  */
4233   for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4234     {
4235       act = bsi_stmt (bsi);
4236       if (TREE_CODE (act) == LABEL_EXPR)
4237 	continue;
4238 
4239       if (!stmt)
4240 	break;
4241 
4242       if (stmt == act)
4243 	{
4244 	  bsi_next (&bsi);
4245 	  break;
4246 	}
4247     }
4248 
4249   if (bsi_end_p (bsi))
4250     return new_bb;
4251 
4252   /* Split the statement list - avoid re-creating new containers as this
4253      brings ugly quadratic memory consumption in the inliner.
4254      (We are still quadratic since we need to update stmt BB pointers,
4255      sadly.)  */
4256   new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
4257   for (tsi_tgt = tsi_start (new_bb->stmt_list);
4258        !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
4259     change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
4260 
4261   return new_bb;
4262 }
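
/* Sketch (not compiled): passes reach the splitter above through the
   generic split_block hook; BB and STMT are hypothetical locals.  The new
   block receives everything after STMT, while labels always stay in BB.  */
#if 0
  edge fallthru = split_block (bb, stmt);
  basic_block rest = fallthru->dest;	/* Second half of the old block.  */
#endif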
4263 
4264 
4265 /* Moves basic block BB after block AFTER.  */
4266 
4267 static bool
4268 tree_move_block_after (basic_block bb, basic_block after)
4269 {
4270   if (bb->prev_bb == after)
4271     return true;
4272 
4273   unlink_block (bb);
4274   link_block (bb, after);
4275 
4276   return true;
4277 }
4278 
4279 
4280 /* Return true if basic_block can be duplicated.  */
4281 
4282 static bool
4283 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4284 {
4285   return true;
4286 }
4287 
4288 
4289 /* Create a duplicate of the basic block BB.  NOTE: This does not
4290    preserve SSA form.  */
4291 
4292 static basic_block
4293 tree_duplicate_bb (basic_block bb)
4294 {
4295   basic_block new_bb;
4296   block_stmt_iterator bsi, bsi_tgt;
4297   tree phi;
4298 
4299   new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4300 
4301   /* Copy the PHI nodes.  We ignore PHI node arguments here because
4302      the incoming edges have not been setup yet.  */
4303   for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4304     {
4305       tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4306       create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4307     }
4308 
4309   /* Keep the chain of PHI nodes in the same order so that they can be
4310      updated by ssa_redirect_edge.  */
4311   set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4312 
4313   bsi_tgt = bsi_start (new_bb);
4314   for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4315     {
4316       def_operand_p def_p;
4317       ssa_op_iter op_iter;
4318       tree stmt, copy;
4319       int region;
4320 
4321       stmt = bsi_stmt (bsi);
4322       if (TREE_CODE (stmt) == LABEL_EXPR)
4323 	continue;
4324 
4325       /* Create a new copy of STMT and duplicate STMT's virtual
4326 	 operands.  */
4327       copy = unshare_expr (stmt);
4328       bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4329       copy_virtual_operands (copy, stmt);
4330       region = lookup_stmt_eh_region (stmt);
4331       if (region >= 0)
4332 	add_stmt_to_eh_region (copy, region);
4333 
4334       /* Create new names for all the definitions created by COPY and
4335 	 add replacement mappings for each new name.  */
4336       FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4337 	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4338     }
4339 
4340   return new_bb;
4341 }
4342 
4343 
4344 /* Basic block BB_COPY was created by code duplication.  Add phi node
4345    arguments for edges going out of BB_COPY.  The blocks that were
4346    duplicated have BB_DUPLICATED set.  */
4347 
4348 void
4349 add_phi_args_after_copy_bb (basic_block bb_copy)
4350 {
4351   basic_block bb, dest;
4352   edge e, e_copy;
4353   edge_iterator ei;
4354   tree phi, phi_copy, phi_next, def;
4355 
4356   bb = get_bb_original (bb_copy);
4357 
4358   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4359     {
4360       if (!phi_nodes (e_copy->dest))
4361 	continue;
4362 
4363       if (e_copy->dest->flags & BB_DUPLICATED)
4364 	dest = get_bb_original (e_copy->dest);
4365       else
4366 	dest = e_copy->dest;
4367 
4368       e = find_edge (bb, dest);
4369       if (!e)
4370 	{
4371 	  /* During loop unrolling the target of the latch edge is copied.
4372 	     In this case we are not looking for an edge to dest, but for
4373 	     an edge to the duplicated block whose original was dest.  */
4374 	  FOR_EACH_EDGE (e, ei, bb->succs)
4375 	    if ((e->dest->flags & BB_DUPLICATED)
4376 		&& get_bb_original (e->dest) == dest)
4377 	      break;
4378 
4379 	  gcc_assert (e != NULL);
4380 	}
4381 
4382       for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4383 	   phi;
4384 	   phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4385 	{
4386 	  phi_next = PHI_CHAIN (phi);
4387 	  def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4388 	  add_phi_arg (phi_copy, def, e_copy);
4389 	}
4390     }
4391 }
4392 
4393 /* Blocks in REGION_COPY array of length N_REGION were created by
4394    duplication of basic blocks.  Add phi node arguments for edges
4395    going from these blocks.  */
4396 
4397 void
4398 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4399 {
4400   unsigned i;
4401 
4402   for (i = 0; i < n_region; i++)
4403     region_copy[i]->flags |= BB_DUPLICATED;
4404 
4405   for (i = 0; i < n_region; i++)
4406     add_phi_args_after_copy_bb (region_copy[i]);
4407 
4408   for (i = 0; i < n_region; i++)
4409     region_copy[i]->flags &= ~BB_DUPLICATED;
4410 }
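
/* Sketch (not compiled): the duplicate-then-fix-PHIs sequence, in the
   order used by tree_duplicate_sese_region below (same local names
   assumed).  add_phi_args_after_copy relies on BB_DUPLICATED being set
   on the copied blocks, which it does itself.  */
#if 0
  initialize_original_copy_tables ();
  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry));
  add_phi_args_after_copy (region_copy, n_region);
  free_original_copy_tables ();
#endif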
4411 
4412 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4413    important exit edge EXIT.  By important we mean that no SSA name defined
4414    inside the region is live over the other exit edges of the region.  All entry
4415    edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
4416    to the duplicate of the region.  SSA form, dominance and loop information
4417    are updated.  The new basic blocks are stored to REGION_COPY in the same
4418    order as they had in REGION, provided that REGION_COPY is not NULL.
4419    The function returns false if it is unable to copy the region,
4420    true otherwise.  */
4421 
4422 bool
4423 tree_duplicate_sese_region (edge entry, edge exit,
4424 			    basic_block *region, unsigned n_region,
4425 			    basic_block *region_copy)
4426 {
4427   unsigned i, n_doms;
4428   bool free_region_copy = false, copying_header = false;
4429   struct loop *loop = entry->dest->loop_father;
4430   edge exit_copy;
4431   basic_block *doms;
4432   edge redirected;
4433   int total_freq = 0, entry_freq = 0;
4434   gcov_type total_count = 0, entry_count = 0;
4435 
4436   if (!can_copy_bbs_p (region, n_region))
4437     return false;
4438 
4439   /* Some sanity checking.  Note that we do not check for all possible
4440      misuses of the functions.  I.e. if you ask to copy something weird,
4441      it will work, but the state of structures probably will not be
4442      correct.  */
4443   for (i = 0; i < n_region; i++)
4444     {
4445       /* We do not handle subloops, i.e. all the blocks must belong to the
4446 	 same loop.  */
4447       if (region[i]->loop_father != loop)
4448 	return false;
4449 
4450       if (region[i] != entry->dest
4451 	  && region[i] == loop->header)
4452 	return false;
4453     }
4454 
4455   loop->copy = loop;
4456 
4457   /* In case the function is used for loop header copying (which is the primary
4458      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
4459   if (loop->header == entry->dest)
4460     {
4461       copying_header = true;
4462       loop->copy = loop->outer;
4463 
4464       if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4465 	return false;
4466 
4467       for (i = 0; i < n_region; i++)
4468 	if (region[i] != exit->src
4469 	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4470 	  return false;
4471     }
4472 
4473   if (!region_copy)
4474     {
4475       region_copy = XNEWVEC (basic_block, n_region);
4476       free_region_copy = true;
4477     }
4478 
4479   gcc_assert (!need_ssa_update_p ());
4480 
4481   /* Record blocks outside the region that are dominated by something
4482      inside.  */
4483   doms = XNEWVEC (basic_block, n_basic_blocks);
4484   initialize_original_copy_tables ();
4485 
4486   n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4487 
4488   if (entry->dest->count)
4489     {
4490       total_count = entry->dest->count;
4491       entry_count = entry->count;
4492       /* Fix up corner cases, to avoid division by zero or creation of negative
4493 	 frequencies.  */
4494       if (entry_count > total_count)
4495 	entry_count = total_count;
4496     }
4497   else
4498     {
4499       total_freq = entry->dest->frequency;
4500       entry_freq = EDGE_FREQUENCY (entry);
4501       /* Fix up corner cases, to avoid division by zero or creation of negative
4502 	 frequencies.  */
4503       if (total_freq == 0)
4504 	total_freq = 1;
4505       else if (entry_freq > total_freq)
4506 	entry_freq = total_freq;
4507     }
4508 
4509   copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4510 	    split_edge_bb_loc (entry));
4511   if (total_count)
4512     {
4513       scale_bbs_frequencies_gcov_type (region, n_region,
4514 				       total_count - entry_count,
4515 				       total_count);
4516       scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4517 				       total_count);
4518     }
4519   else
4520     {
4521       scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4522 				 total_freq);
4523       scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4524     }
4525 
4526   if (copying_header)
4527     {
4528       loop->header = exit->dest;
4529       loop->latch = exit->src;
4530     }
4531 
4532   /* Redirect the entry and add the phi node arguments.  */
4533   redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4534   gcc_assert (redirected != NULL);
4535   flush_pending_stmts (entry);
4536 
4537   /* Concerning updating of dominators:  we must recount dominators
4538      for the entry block and its copy.  Anything outside the region
4539      that was dominated by something inside needs recounting as
4540      well.  */
4541   set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4542   doms[n_doms++] = get_bb_original (entry->dest);
4543   iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4544   free (doms);
4545 
4546   /* Add the other PHI node arguments.  */
4547   add_phi_args_after_copy (region_copy, n_region);
4548 
4549   /* Update the SSA web.  */
4550   update_ssa (TODO_update_ssa);
4551 
4552   if (free_region_copy)
4553     free (region_copy);
4554 
4555   free_original_copy_tables ();
4556   return true;
4557 }
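
/* Sketch (not compiled) of a typical call, e.g. for loop header copying:
   ENTRY is the preheader edge, EXIT the edge leaving the copied condition,
   and BBS/N_BBS the blocks of the region (all hypothetical locals).  */
#if 0
  if (tree_duplicate_sese_region (entry, exit, bbs, n_bbs, NULL))
    {
      /* ENTRY now leads to the copy; SSA form, dominators and loop
	 structures have already been updated.  */
    }
#endif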
4558 
4559 /*
4560 DEF_VEC_P(basic_block);
4561 DEF_VEC_ALLOC_P(basic_block,heap);
4562 */
4563 
4564 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
4565    adding blocks when the dominator traversal reaches EXIT.  This
4566    function silently assumes that ENTRY strictly dominates EXIT.  */
4567 
4568 static void
4569 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4570 			      VEC(basic_block,heap) **bbs_p)
4571 {
4572   basic_block son;
4573 
4574   for (son = first_dom_son (CDI_DOMINATORS, entry);
4575        son;
4576        son = next_dom_son (CDI_DOMINATORS, son))
4577     {
4578       VEC_safe_push (basic_block, heap, *bbs_p, son);
4579       if (son != exit)
4580 	gather_blocks_in_sese_region (son, exit, bbs_p);
4581     }
4582 }
4583 
4584 
4585 struct move_stmt_d
4586 {
4587   tree block;
4588   tree from_context;
4589   tree to_context;
4590   bitmap vars_to_remove;
4591   htab_t new_label_map;
4592   bool remap_decls_p;
4593 };
4594 
4595 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
4596    contained in *TP and change the DECL_CONTEXT of every local
4597    variable referenced in *TP.  */
4598 
4599 static tree
4600 move_stmt_r (tree *tp, int *walk_subtrees, void *data)
4601 {
4602   struct move_stmt_d *p = (struct move_stmt_d *) data;
4603   tree t = *tp;
4604 
4605   if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (t))))
4606     TREE_BLOCK (t) = p->block;
4607 
4608   if (OMP_DIRECTIVE_P (t)
4609       && TREE_CODE (t) != OMP_RETURN
4610       && TREE_CODE (t) != OMP_CONTINUE)
4611     {
4612       /* Do not remap variables inside OMP directives.  Variables
4613 	 referenced in clauses and directive header belong to the
4614 	 parent function and should not be moved into the child
4615 	 function.  */
4616       bool save_remap_decls_p = p->remap_decls_p;
4617       p->remap_decls_p = false;
4618       *walk_subtrees = 0;
4619 
4620       walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
4621 
4622       p->remap_decls_p = save_remap_decls_p;
4623     }
4624   else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
4625     {
4626       if (TREE_CODE (t) == LABEL_DECL)
4627 	{
4628 	  if (p->new_label_map)
4629 	    {
4630 	      struct tree_map in, *out;
4631 	      in.from = t;
4632 	      out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
4633 	      if (out)
4634 		*tp = t = out->to;
4635 	    }
4636 
4637 	  DECL_CONTEXT (t) = p->to_context;
4638 	}
4639       else if (p->remap_decls_p)
4640 	{
4641 	  DECL_CONTEXT (t) = p->to_context;
4642 
4643 	  if (TREE_CODE (t) == VAR_DECL)
4644 	    {
4645 	      struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
4646 	      f->unexpanded_var_list
4647 		= tree_cons (0, t, f->unexpanded_var_list);
4648 
4649 	      /* Mark T to be removed from the original function,
4650 	         otherwise it will be given a DECL_RTL when the
4651 		 original function is expanded.  */
4652 	      bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
4653 	    }
4654 	}
4655     }
4656   else if (TYPE_P (t))
4657     *walk_subtrees = 0;
4658 
4659   return NULL_TREE;
4660 }
4661 
4662 
4663 /* Move basic block BB from function CFUN to function DEST_FN.  The
4664    block is moved out of the original linked list and placed after
4665    block AFTER in the new list.  Also, the block is removed from the
4666    original array of blocks and placed in DEST_FN's array of blocks.
4667    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
4668    updated to reflect the moved edges.
4669 
4670    On exit, local variables that need to be removed from
4671    CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE.  */
4672 
4673 static void
4674 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4675 		  basic_block after, bool update_edge_count_p,
4676 		  bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4677 {
4678   struct control_flow_graph *cfg;
4679   edge_iterator ei;
4680   edge e;
4681   block_stmt_iterator si;
4682   struct move_stmt_d d;
4683   unsigned old_len, new_len;
4684   basic_block *addr;
4685 
4686   /* Link BB to the new linked list.  */
4687   move_block_after (bb, after);
4688 
4689   /* Update the edge count in the corresponding flowgraphs.  */
4690   if (update_edge_count_p)
4691     FOR_EACH_EDGE (e, ei, bb->succs)
4692       {
4693 	cfun->cfg->x_n_edges--;
4694 	dest_cfun->cfg->x_n_edges++;
4695       }
4696 
4697   /* Remove BB from the original basic block array.  */
4698   VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4699   cfun->cfg->x_n_basic_blocks--;
4700 
4701   /* Grow DEST_CFUN's basic block array if needed.  */
4702   cfg = dest_cfun->cfg;
4703   cfg->x_n_basic_blocks++;
4704   if (bb->index > cfg->x_last_basic_block)
4705     cfg->x_last_basic_block = bb->index;
4706 
4707   old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4708   if ((unsigned) cfg->x_last_basic_block >= old_len)
4709     {
4710       new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4711       VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
4712       addr = VEC_address (basic_block, cfg->x_basic_block_info);
4713       memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
4714     }
4715 
4716   VEC_replace (basic_block, cfg->x_basic_block_info,
4717                cfg->x_last_basic_block, bb);
4718 
4719   /* The statements in BB need to be associated with a new TREE_BLOCK.
4720      Labels need to be associated with a new label-to-block map.  */
4721   memset (&d, 0, sizeof (d));
4722   d.vars_to_remove = vars_to_remove;
4723 
4724   for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4725     {
4726       tree stmt = bsi_stmt (si);
4727       int region;
4728 
4729       d.from_context = cfun->decl;
4730       d.to_context = dest_cfun->decl;
4731       d.remap_decls_p = true;
4732       d.new_label_map = new_label_map;
4733       if (TREE_BLOCK (stmt))
4734 	d.block = DECL_INITIAL (dest_cfun->decl);
4735 
4736       walk_tree (&stmt, move_stmt_r, &d, NULL);
4737 
4738       if (TREE_CODE (stmt) == LABEL_EXPR)
4739 	{
4740 	  tree label = LABEL_EXPR_LABEL (stmt);
4741 	  int uid = LABEL_DECL_UID (label);
4742 
4743 	  gcc_assert (uid > -1);
4744 
4745 	  old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4746 	  if (old_len <= (unsigned) uid)
4747 	    {
4748 	      new_len = 3 * uid / 2;
4749 	      VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
4750 			     new_len);
4751 	      addr = VEC_address (basic_block, cfg->x_label_to_block_map);
4752 	      memset (&addr[old_len], 0,
4753 		      sizeof (basic_block) * (new_len - old_len));
4754 	    }
4755 
4756 	  VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4757 	  VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4758 
4759 	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4760 
4761 	  if (uid >= dest_cfun->last_label_uid)
4762 	    dest_cfun->last_label_uid = uid + 1;
4763 	}
4764       else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4765 	TREE_OPERAND (stmt, 0) =
4766 	  build_int_cst (NULL_TREE,
4767 			 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4768 			 + eh_offset);
4769 
4770       region = lookup_stmt_eh_region (stmt);
4771       if (region >= 0)
4772 	{
4773 	  add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4774 	  remove_stmt_from_eh_region (stmt);
4775 	}
4776     }
4777 }
4778 
4779 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4780    the outermost EH region.  Use REGION as the incoming base EH region.  */
4781 
4782 static int
4783 find_outermost_region_in_block (struct function *src_cfun,
4784 				basic_block bb, int region)
4785 {
4786   block_stmt_iterator si;
4787 
4788   for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4789     {
4790       tree stmt = bsi_stmt (si);
4791       int stmt_region;
4792 
4793       if (TREE_CODE (stmt) == RESX_EXPR)
4794 	stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4795       else
4796 	stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4797       if (stmt_region > 0)
4798 	{
4799 	  if (region < 0)
4800 	    region = stmt_region;
4801 	  else if (stmt_region != region)
4802 	    {
4803 	      region = eh_region_outermost (src_cfun, stmt_region, region);
4804 	      gcc_assert (region != -1);
4805 	    }
4806 	}
4807     }
4808 
4809   return region;
4810 }
4811 
4812 static tree
4813 new_label_mapper (tree decl, void *data)
4814 {
4815   htab_t hash = (htab_t) data;
4816   struct tree_map *m;
4817   void **slot;
4818 
4819   gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4820 
4821   m = xmalloc (sizeof (struct tree_map));
4822   m->hash = DECL_UID (decl);
4823   m->from = decl;
4824   m->to = create_artificial_label ();
4825   LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4826 
4827   slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4828   gcc_assert (*slot == NULL);
4829 
4830   *slot = m;
4831 
4832   return m->to;
4833 }
4834 
4835 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
4836    EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
4837    single basic block in the original CFG and the new basic block is
4838    returned.  DEST_CFUN must not have a CFG yet.
4839 
4840    Note that the region need not be a pure SESE region.  Blocks inside
4841    the region may contain calls to abort/exit.  The only restriction
4842    is that ENTRY_BB should be the only entry point and it must
4843    dominate EXIT_BB.
4844 
4845    All local variables referenced in the region are assumed to be in
4846    the corresponding BLOCK_VARS and unexpanded variable lists
4847    associated with DEST_CFUN.  */
4848 
4849 basic_block
4850 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
4851 		        basic_block exit_bb)
4852 {
4853   VEC(basic_block,heap) *bbs;
4854   basic_block after, bb, *entry_pred, *exit_succ;
4855   struct function *saved_cfun;
4856   int *entry_flag, *exit_flag, eh_offset;
4857   unsigned i, num_entry_edges, num_exit_edges;
4858   edge e;
4859   edge_iterator ei;
4860   bitmap vars_to_remove;
4861   htab_t new_label_map;
4862 
4863   saved_cfun = cfun;
4864 
4865   /* Collect all the blocks in the region.  Manually add ENTRY_BB
4866      because gather_blocks_in_sese_region will not add it.  */
4867   calculate_dominance_info (CDI_DOMINATORS);
4868 
4869   /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
4870      region.  */
4871   gcc_assert (entry_bb != exit_bb
4872               && (!exit_bb
4873 		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
4874 
4875   bbs = NULL;
4876   VEC_safe_push (basic_block, heap, bbs, entry_bb);
4877   gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
4878 
4879   /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
4880      the predecessor edges to ENTRY_BB and the successor edges to
4881      EXIT_BB so that we can re-attach them to the new basic block that
4882      will replace the region.  */
4883   num_entry_edges = EDGE_COUNT (entry_bb->preds);
4884   entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
4885   entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
4886   i = 0;
4887   for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
4888     {
4889       entry_flag[i] = e->flags;
4890       entry_pred[i++] = e->src;
4891       remove_edge (e);
4892     }
4893 
4894   if (exit_bb)
4895     {
4896       num_exit_edges = EDGE_COUNT (exit_bb->succs);
4897       exit_succ = (basic_block *) xcalloc (num_exit_edges,
4898 					   sizeof (basic_block));
4899       exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
4900       i = 0;
4901       for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
4902 	{
4903 	  exit_flag[i] = e->flags;
4904 	  exit_succ[i++] = e->dest;
4905 	  remove_edge (e);
4906 	}
4907     }
4908   else
4909     {
4910       num_exit_edges = 0;
4911       exit_succ = NULL;
4912       exit_flag = NULL;
4913     }
4914 
4915   /* Switch context to the child function to initialize DEST_FN's CFG.  */
4916   gcc_assert (dest_cfun->cfg == NULL);
4917   cfun = dest_cfun;
4918 
4919   init_empty_tree_cfg ();
4920 
4921   /* Initialize EH information for the new function.  */
4922   eh_offset = 0;
4923   new_label_map = NULL;
4924   if (saved_cfun->eh)
4925     {
4926       int region = -1;
4927 
4928       for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4929 	region = find_outermost_region_in_block (saved_cfun, bb, region);
4930 
4931       init_eh_for_function ();
4932       if (region != -1)
4933 	{
4934 	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
4935 	  eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
4936 					    new_label_map, region, 0);
4937 	}
4938     }
4939 
4940   cfun = saved_cfun;
4941 
4942   /* Move blocks from BBS into DEST_CFUN.  */
4943   gcc_assert (VEC_length (basic_block, bbs) >= 2);
4944   after = dest_cfun->cfg->x_entry_block_ptr;
4945   vars_to_remove = BITMAP_ALLOC (NULL);
4946   for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
4947     {
4948       /* No need to update edge counts on the last block.  It has
4949 	 already been updated earlier when we detached the region from
4950 	 the original CFG.  */
4951       move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
4952 	                new_label_map, eh_offset);
4953       after = bb;
4954     }
4955 
4956   if (new_label_map)
4957     htab_delete (new_label_map);
4958 
4959   /* Remove the variables marked in VARS_TO_REMOVE from
4960      CFUN->UNEXPANDED_VAR_LIST.  Otherwise, they will be given a
4961      DECL_RTL in the context of CFUN.  */
4962   if (!bitmap_empty_p (vars_to_remove))
4963     {
4964       tree *p;
4965 
4966       for (p = &cfun->unexpanded_var_list; *p; )
4967 	{
4968 	  tree var = TREE_VALUE (*p);
4969 	  if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
4970 	    {
4971 	      *p = TREE_CHAIN (*p);
4972 	      continue;
4973 	    }
4974 
4975 	  p = &TREE_CHAIN (*p);
4976 	}
4977     }
4978 
4979   BITMAP_FREE (vars_to_remove);
4980 
4981   /* Rewire the entry and exit blocks.  The successor to the entry
4982      block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
4983      the child function.  Similarly, the predecessor of DEST_FN's
4984      EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
4985      need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
4986      various CFG manipulation functions get to the right CFG.
4987 
4988      FIXME, this is silly.  The CFG ought to become a parameter to
4989      these helpers.  */
4990   cfun = dest_cfun;
4991   make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
4992   if (exit_bb)
4993     make_edge (exit_bb,  EXIT_BLOCK_PTR, 0);
4994   cfun = saved_cfun;
4995 
4996   /* Back in the original function, the SESE region has disappeared,
4997      create a new basic block in its place.  */
4998   bb = create_empty_bb (entry_pred[0]);
4999   for (i = 0; i < num_entry_edges; i++)
5000     make_edge (entry_pred[i], bb, entry_flag[i]);
5001 
5002   for (i = 0; i < num_exit_edges; i++)
5003     make_edge (bb, exit_succ[i], exit_flag[i]);
5004 
5005   if (exit_bb)
5006     {
5007       free (exit_flag);
5008       free (exit_succ);
5009     }
5010   free (entry_flag);
5011   free (entry_pred);
5012   free_dominance_info (CDI_DOMINATORS);
5013   free_dominance_info (CDI_POST_DOMINATORS);
5014   VEC_free (basic_block, heap, bbs);
5015 
5016   return bb;
5017 }
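
/* Sketch (not compiled): an outliner (the OpenMP expander is the expected
   client) hands the region to a freshly created child function.  CHILD_FN,
   ENTRY_BB and EXIT_BB are hypothetical locals.  */
#if 0
  basic_block replacement_bb
    = move_sese_region_to_fn (DECL_STRUCT_FUNCTION (child_fn),
			      entry_bb, exit_bb);
#endif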
5018 
5019 
5020 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h)  */
5021 
5022 void
5023 dump_function_to_file (tree fn, FILE *file, int flags)
5024 {
5025   tree arg, vars, var;
5026   bool ignore_topmost_bind = false, any_var = false;
5027   basic_block bb;
5028   tree chain;
5029   struct function *saved_cfun;
5030 
5031   fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
5032 
5033   arg = DECL_ARGUMENTS (fn);
5034   while (arg)
5035     {
5036       print_generic_expr (file, arg, dump_flags);
5037       if (TREE_CHAIN (arg))
5038 	fprintf (file, ", ");
5039       arg = TREE_CHAIN (arg);
5040     }
5041   fprintf (file, ")\n");
5042 
5043   if (flags & TDF_DETAILS)
5044     dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
5045   if (flags & TDF_RAW)
5046     {
5047       dump_node (fn, TDF_SLIM | flags, file);
5048       return;
5049     }
5050 
5051   /* Switch CFUN to point to FN.  */
5052   saved_cfun = cfun;
5053   cfun = DECL_STRUCT_FUNCTION (fn);
5054 
5055   /* When GIMPLE is lowered, the variables are no longer available in
5056      BIND_EXPRs, so display them separately.  */
5057   if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
5058     {
5059       ignore_topmost_bind = true;
5060 
5061       fprintf (file, "{\n");
5062       for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
5063 	{
5064 	  var = TREE_VALUE (vars);
5065 
5066 	  print_generic_decl (file, var, flags);
5067 	  fprintf (file, "\n");
5068 
5069 	  any_var = true;
5070 	}
5071     }
5072 
5073   if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
5074     {
5075       /* Make a CFG based dump.  */
5076       check_bb_profile (ENTRY_BLOCK_PTR, file);
5077       if (!ignore_topmost_bind)
5078 	fprintf (file, "{\n");
5079 
5080       if (any_var && n_basic_blocks)
5081 	fprintf (file, "\n");
5082 
5083       FOR_EACH_BB (bb)
5084 	dump_generic_bb (file, bb, 2, flags);
5085 
5086       fprintf (file, "}\n");
5087       check_bb_profile (EXIT_BLOCK_PTR, file);
5088     }
5089   else
5090     {
5091       int indent;
5092 
5093       /* Make a tree based dump.  */
5094       chain = DECL_SAVED_TREE (fn);
5095 
5096       if (chain && TREE_CODE (chain) == BIND_EXPR)
5097 	{
5098 	  if (ignore_topmost_bind)
5099 	    {
5100 	      chain = BIND_EXPR_BODY (chain);
5101 	      indent = 2;
5102 	    }
5103 	  else
5104 	    indent = 0;
5105 	}
5106       else
5107 	{
5108 	  if (!ignore_topmost_bind)
5109 	    fprintf (file, "{\n");
5110 	  indent = 2;
5111 	}
5112 
5113       if (any_var)
5114 	fprintf (file, "\n");
5115 
5116       print_generic_stmt_indented (file, chain, flags, indent);
5117       if (ignore_topmost_bind)
5118 	fprintf (file, "}\n");
5119     }
5120 
5121   fprintf (file, "\n\n");
5122 
5123   /* Restore CFUN.  */
5124   cfun = saved_cfun;
5125 }
5126 
5127 
5128 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */
5129 
5130 void
5131 debug_function (tree fn, int flags)
5132 {
5133   dump_function_to_file (fn, stderr, flags);
5134 }
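
/* Sketch: from a debugger session, the wrapper above gives a quick
   CFG-based dump of the function currently being compiled.  */
#if 0
  debug_function (current_function_decl, TDF_DETAILS);
#endif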
5135 
5136 
5137 /* Pretty printing of the loops' intermediate representation.  */
5138 static void print_loop (FILE *, struct loop *, int);
5139 static void print_pred_bbs (FILE *, basic_block bb);
5140 static void print_succ_bbs (FILE *, basic_block bb);
5141 
5142 
5143 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
5144 
5145 static void
5146 print_pred_bbs (FILE *file, basic_block bb)
5147 {
5148   edge e;
5149   edge_iterator ei;
5150 
5151   FOR_EACH_EDGE (e, ei, bb->preds)
5152     fprintf (file, "bb_%d ", e->src->index);
5153 }
5154 
5155 
5156 /* Print on FILE the indexes for the successors of basic_block BB.  */
5157 
5158 static void
5159 print_succ_bbs (FILE *file, basic_block bb)
5160 {
5161   edge e;
5162   edge_iterator ei;
5163 
5164   FOR_EACH_EDGE (e, ei, bb->succs)
5165     fprintf (file, "bb_%d ", e->dest->index);
5166 }
5167 
5168 
5169 /* Pretty print LOOP on FILE, indented INDENT spaces.  */
5170 
5171 static void
5172 print_loop (FILE *file, struct loop *loop, int indent)
5173 {
5174   char *s_indent;
5175   basic_block bb;
5176 
5177   if (loop == NULL)
5178     return;
5179 
5180   s_indent = (char *) alloca ((size_t) indent + 1);
5181   memset ((void *) s_indent, ' ', (size_t) indent);
5182   s_indent[indent] = '\0';
5183 
5184   /* Print the loop's header.  */
5185   fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5186 
5187   /* Print the loop's body.  */
5188   fprintf (file, "%s{\n", s_indent);
5189   FOR_EACH_BB (bb)
5190     if (bb->loop_father == loop)
5191       {
5192 	/* Print the basic_block's header.  */
5193 	fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
5194 	print_pred_bbs (file, bb);
5195 	fprintf (file, "}, succs = {");
5196 	print_succ_bbs (file, bb);
5197 	fprintf (file, "})\n");
5198 
5199 	/* Print the basic_block's body.  */
5200 	fprintf (file, "%s  {\n", s_indent);
5201 	tree_dump_bb (bb, file, indent + 4);
5202 	fprintf (file, "%s  }\n", s_indent);
5203       }
5204 
5205   print_loop (file, loop->inner, indent + 2);
5206   fprintf (file, "%s}\n", s_indent);
5207   print_loop (file, loop->next, indent);
5208 }
5209 
5210 
5211 /* Follow a CFG edge from the entry point of the program, and on entry
5212    of a loop, pretty print the loop structure on FILE.  */
5213 
5214 void
5215 print_loop_ir (FILE *file)
5216 {
5217   basic_block bb;
5218 
5219   bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5220   if (bb && bb->loop_father)
5221     print_loop (file, bb->loop_father, 0);
5222 }
5223 
5224 
5225 /* Debugging loops structure at tree level.  */
5226 
5227 void
5228 debug_loop_ir (void)
5229 {
5230   print_loop_ir (stderr);
5231 }
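
/* Sketch (not compiled): a pass that has loop information available can
   emit the same loop tree into its dump file.  */
#if 0
  if (dump_file)
    print_loop_ir (dump_file);
#endif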
5232 
5233 
5234 /* Return true if BB ends with a call, possibly followed by some
5235    instructions that must stay with the call.  Return false
5236    otherwise.  */
5237 
5238 static bool
5239 tree_block_ends_with_call_p (basic_block bb)
5240 {
5241   block_stmt_iterator bsi = bsi_last (bb);
5242   return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5243 }
5244 
5245 
5246 /* Return true if BB ends with a conditional branch.  Return false
5247    otherwise.  */
5248 
5249 static bool
5250 tree_block_ends_with_condjump_p (basic_block bb)
5251 {
5252   tree stmt = last_stmt (bb);
5253   return (stmt && TREE_CODE (stmt) == COND_EXPR);
5254 }
5255 
5256 
5257 /* Return true if we need to add a fake edge to exit at statement T.
5258    Helper function for tree_flow_call_edges_add.  */
5259 
5260 static bool
5261 need_fake_edge_p (tree t)
5262 {
5263   tree call;
5264 
5265   /* NORETURN and LONGJMP calls already have an edge to exit.
5266      CONST and PURE calls do not need one.
5267      We don't currently check for CONST and PURE here, although
5268      it would be a good idea, because those attributes are
5269      figured out from the RTL in mark_constant_function, and
5270      the counter incrementation code from -fprofile-arcs
5271      leads to different results from -fbranch-probabilities.  */
5272   call = get_call_expr_in (t);
5273   if (call
5274       && !(call_expr_flags (call) & ECF_NORETURN))
5275     return true;
5276 
5277   if (TREE_CODE (t) == ASM_EXPR
5278        && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5279     return true;
5280 
5281   return false;
5282 }
5283 
5284 
5285 /* Add fake edges to the function exit for any non-constant, non-noreturn
5286    calls and volatile inline assembly in the bitmap of blocks specified by
5287    BLOCKS, or in the whole CFG if BLOCKS is zero.  Return
5288    the number of blocks that were split.
5289 
5290    The goal is to expose cases in which entering a basic block does
5291    not imply that all subsequent instructions must be executed.  */
5292 
5293 static int
5294 tree_flow_call_edges_add (sbitmap blocks)
5295 {
5296   int i;
5297   int blocks_split = 0;
5298   int last_bb = last_basic_block;
5299   bool check_last_block = false;
5300 
5301   if (n_basic_blocks == NUM_FIXED_BLOCKS)
5302     return 0;
5303 
5304   if (! blocks)
5305     check_last_block = true;
5306   else
5307     check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
5308 
5309   /* In the last basic block, before epilogue generation, there will be
5310      a fallthru edge to EXIT.  Special care is required if the last insn
5311      of the last basic block is a call because make_edge folds duplicate
5312      edges, which would result in the fallthru edge also being marked
5313      fake, which would result in the fallthru edge being removed by
5314      remove_fake_edges, which would result in an invalid CFG.
5315 
5316      Moreover, we can't elide the outgoing fake edge, since the block
5317      profiler needs to take this into account in order to solve the minimal
5318      spanning tree in the case that the call doesn't return.
5319 
5320      Handle this by adding a dummy instruction in a new last basic block.  */
5321   if (check_last_block)
5322     {
5323       basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5324       block_stmt_iterator bsi = bsi_last (bb);
5325       tree t = NULL_TREE;
5326       if (!bsi_end_p (bsi))
5327 	t = bsi_stmt (bsi);
5328 
5329       if (t && need_fake_edge_p (t))
5330 	{
5331 	  edge e;
5332 
5333 	  e = find_edge (bb, EXIT_BLOCK_PTR);
5334 	  if (e)
5335 	    {
5336 	      bsi_insert_on_edge (e, build_empty_stmt ());
5337 	      bsi_commit_edge_inserts ();
5338 	    }
5339 	}
5340     }
5341 
5342   /* Now add fake edges to the function exit for any non-constant
5343      calls, since there is no way we can determine whether they will
5344      return or not.  */
5345   for (i = 0; i < last_bb; i++)
5346     {
5347       basic_block bb = BASIC_BLOCK (i);
5348       block_stmt_iterator bsi;
5349       tree stmt, last_stmt;
5350 
5351       if (!bb)
5352 	continue;
5353 
5354       if (blocks && !TEST_BIT (blocks, i))
5355 	continue;
5356 
5357       bsi = bsi_last (bb);
5358       if (!bsi_end_p (bsi))
5359 	{
5360 	  last_stmt = bsi_stmt (bsi);
5361 	  do
5362 	    {
5363 	      stmt = bsi_stmt (bsi);
5364 	      if (need_fake_edge_p (stmt))
5365 		{
5366 		  edge e;
5367 		  /* The handling above of the final block before the
5368 		     epilogue should be enough to verify that there is
5369 		     no edge to the exit block in CFG already.
5370 		     Calling make_edge in such case would cause us to
5371 		     mark that edge as fake and remove it later.  */
5372 #ifdef ENABLE_CHECKING
5373 		  if (stmt == last_stmt)
5374 		    {
5375 		      e = find_edge (bb, EXIT_BLOCK_PTR);
5376 		      gcc_assert (e == NULL);
5377 		    }
5378 #endif
5379 
5380 		  /* Note that the following may create a new basic block
5381 		     and renumber the existing basic blocks.  */
5382 		  if (stmt != last_stmt)
5383 		    {
5384 		      e = split_block (bb, stmt);
5385 		      if (e)
5386 			blocks_split++;
5387 		    }
5388 		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5389 		}
5390 	      bsi_prev (&bsi);
5391 	    }
5392 	  while (!bsi_end_p (bsi));
5393 	}
5394     }
5395 
5396   if (blocks_split)
5397     verify_flow_info ();
5398 
5399   return blocks_split;
5400 }
5401 
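/* Illustrative sketch, not part of the original file: the usual pattern
   for this hook, e.g. in profiling-style clients, is to add fake exit
   edges for every block (BLOCKS == NULL), run the analysis that needs a
   connected exit, and then strip the fake edges again.  remove_fake_edges
   and dump_file are assumed from cfganal.c and the dump machinery.  */

static void
with_fake_exit_edges (void)
{
  int split = tree_flow_call_edges_add (NULL);

  /* ... run an analysis that requires every block to reach EXIT ...  */

  remove_fake_edges ();

  if (split && dump_file)
    fprintf (dump_file, "split %d blocks for fake exit edges\n", split);
}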
5402 /* Purge dead abnormal call edges from basic block BB.  */
5403 
5404 bool
5405 tree_purge_dead_abnormal_call_edges (basic_block bb)
5406 {
5407   bool changed = tree_purge_dead_eh_edges (bb);
5408 
5409   if (current_function_has_nonlocal_label)
5410     {
5411       tree stmt = last_stmt (bb);
5412       edge_iterator ei;
5413       edge e;
5414 
5415       if (!(stmt && tree_can_make_abnormal_goto (stmt)))
5416 	for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5417 	  {
5418 	    if (e->flags & EDGE_ABNORMAL)
5419 	      {
5420 		remove_edge (e);
5421 		changed = true;
5422 	      }
5423 	    else
5424 	      ei_next (&ei);
5425 	  }
5426 
5427       /* See tree_purge_dead_eh_edges below.  */
5428       if (changed)
5429 	free_dominance_info (CDI_DOMINATORS);
5430     }
5431 
5432   return changed;
5433 }
5434 
5435 /* Purge dead EH edges from basic block BB.  */
5436 
5437 bool
5438 tree_purge_dead_eh_edges (basic_block bb)
5439 {
5440   bool changed = false;
5441   edge e;
5442   edge_iterator ei;
5443   tree stmt = last_stmt (bb);
5444 
5445   if (stmt && tree_can_throw_internal (stmt))
5446     return false;
5447 
5448   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5449     {
5450       if (e->flags & EDGE_EH)
5451 	{
5452 	  remove_edge (e);
5453 	  changed = true;
5454 	}
5455       else
5456 	ei_next (&ei);
5457     }
5458 
5459   /* Removal of dead EH edges might change the dominators of more than
5460      just the immediate successors.  E.g. when bb1 is changed so that
5461      it can no longer throw, and bb1->bb3 and bb1->bb4 are the dead
5462      EH edges purged by this function in:
5463            0
5464 	  / \
5465 	 v   v
5466 	 1-->2
5467         / \  |
5468        v   v |
5469        3-->4 |
5470         \    v
5471 	 --->5
5472 	     |
5473 	     -
5474      idom(bb5) must be recomputed.  For now just free the dominance
5475      info.  */
5476   if (changed)
5477     free_dominance_info (CDI_DOMINATORS);
5478 
5479   return changed;
5480 }
5481 
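/* Illustrative sketch, not part of the original file: a hypothetical
   caller.  A pass that has just simplified STMT so that it can no longer
   throw would typically purge the now-dead EH successors of its block.
   bb_for_stmt is assumed from tree-flow.h.  */

static void
purge_eh_edges_after_simplification (tree stmt)
{
  basic_block bb = bb_for_stmt (stmt);

  if (bb && !tree_can_throw_internal (stmt))
    tree_purge_dead_eh_edges (bb);
}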
5482 bool
5483 tree_purge_all_dead_eh_edges (bitmap blocks)
5484 {
5485   bool changed = false;
5486   unsigned i;
5487   bitmap_iterator bi;
5488 
5489   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5490     {
5491       changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5492     }
5493 
5494   return changed;
5495 }
5496 
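/* Illustrative sketch, not part of the original file: a hypothetical
   caller showing the usual pattern of collecting the indices of modified
   blocks in a bitmap and purging their dead EH edges in one call.
   BITMAP_ALLOC, BITMAP_FREE and bitmap_set_bit are assumed from bitmap.h.  */

static bool
purge_eh_edges_in_two_blocks (basic_block b1, basic_block b2)
{
  bitmap to_purge = BITMAP_ALLOC (NULL);
  bool changed;

  bitmap_set_bit (to_purge, b1->index);
  bitmap_set_bit (to_purge, b2->index);
  changed = tree_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  return changed;
}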
5497 /* This function is called whenever a new edge is created or
5498    redirected.  */
5499 
5500 static void
5501 tree_execute_on_growing_pred (edge e)
5502 {
5503   basic_block bb = e->dest;
5504 
5505   if (phi_nodes (bb))
5506     reserve_phi_args_for_new_edge (bb);
5507 }
5508 
5509 /* This function is called immediately before edge E is removed from
5510    the edge vector E->dest->preds.  */
5511 
5512 static void
5513 tree_execute_on_shrinking_pred (edge e)
5514 {
5515   if (phi_nodes (e->dest))
5516     remove_phi_args (e);
5517 }
5518 
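/* Illustrative sketch, not part of the original file: the two hooks above
   keep the argument count of every PHI node in a block in sync with the
   number of incoming edges.  A hypothetical debugging check of that
   invariant could look like this.  */

static void
check_phi_argument_counts (basic_block bb)
{
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    gcc_assert ((unsigned) PHI_NUM_ARGS (phi) == EDGE_COUNT (bb->preds));
}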
5519 /*---------------------------------------------------------------------------
5520   Helper functions for Loop versioning
5521   ---------------------------------------------------------------------------*/
5522 
5523 /* Adjust the PHI nodes of the 'first' basic block.  The 'second' basic
5524    block is a copy of 'first', and both are dominated by the 'new_head'
5525    basic block.  When 'new_head' was created by splitting 'second's
5526    incoming edge, it received PHI arguments on that edge from split_edge().
5527    Later, an additional edge 'e' was created to connect 'new_head' and
5528    'first'.  This routine now adds, on edge 'e', the same PHI arguments
5529    that the new_head->second edge received as part of the edge splitting.
5530 */
5531 
5532 static void
5533 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5534 				basic_block new_head, edge e)
5535 {
5536   tree phi1, phi2;
5537   edge e2 = find_edge (new_head, second);
5538 
5539   /* Because NEW_HEAD has been created by splitting SECOND's incoming
5540      edge, we should always have an edge from NEW_HEAD to SECOND.  */
5541   gcc_assert (e2 != NULL);
5542 
5543   /* Browse all 'second' basic block phi nodes and add phi args to
5544      edge 'e' for 'first' head. PHI args are always in correct order.  */
5545 
5546   for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5547        phi2 && phi1;
5548        phi2 = PHI_CHAIN (phi2),  phi1 = PHI_CHAIN (phi1))
5549     {
5550       tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5551       add_phi_arg (phi1, def, e);
5552     }
5553 }
5554 
5555 /* Add an if-else statement to COND_BB with condition COND_EXPR.
5556    FIRST_HEAD is the destination of the THEN part and SECOND_HEAD is
5557    the destination of the ELSE part.  */
5558 static void
5559 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5560                             basic_block cond_bb, void *cond_e)
5561 {
5562   block_stmt_iterator bsi;
5563   tree goto1 = NULL_TREE;
5564   tree goto2 = NULL_TREE;
5565   tree new_cond_expr = NULL_TREE;
5566   tree cond_expr = (tree) cond_e;
5567   edge e0;
5568 
5569   /* Build the new conditional expression.  */
5570   goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5571   goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5572   new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5573 
5574   /* Add new cond in cond_bb.  */
5575   bsi = bsi_start (cond_bb);
5576   bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5577   /* Adjust edges appropriately to connect new head with first head
5578      as well as second head.  */
5579   e0 = single_succ_edge (cond_bb);
5580   e0->flags &= ~EDGE_FALLTHRU;
5581   e0->flags |= EDGE_FALSE_VALUE;
5582 }
5583 
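/* Illustrative sketch, not part of the original file: the two helpers
   above are normally reached through the lv_* entries of the cfg hooks
   table during loop versioning.  Building a guarding condition for
   tree_lv_add_condition_to_bb might look like this; N is assumed to be
   an integer-typed gimple value the caller already has.  Note that the
   helper only clears EDGE_FALLTHRU on COND_BB's single outgoing edge and
   marks it EDGE_FALSE_VALUE; the caller is responsible for the rest of
   the CFG surgery.  */

static void
add_versioning_guard (basic_block cond_bb, basic_block first_head,
                      basic_block second_head, tree n)
{
  tree limit = build_int_cst (TREE_TYPE (n), 100);
  tree cond = build2 (LE_EXPR, boolean_type_node, n, limit);

  tree_lv_add_condition_to_bb (first_head, second_head, cond_bb, cond);
}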
5584 struct cfg_hooks tree_cfg_hooks = {
5585   "tree",
5586   tree_verify_flow_info,
5587   tree_dump_bb,			/* dump_bb  */
5588   create_bb,			/* create_basic_block  */
5589   tree_redirect_edge_and_branch,/* redirect_edge_and_branch  */
5590   tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force  */
5591   remove_bb,			/* delete_basic_block  */
5592   tree_split_block,		/* split_block  */
5593   tree_move_block_after,	/* move_block_after  */
5594   tree_can_merge_blocks_p,	/* can_merge_blocks_p  */
5595   tree_merge_blocks,		/* merge_blocks  */
5596   tree_predict_edge,		/* predict_edge  */
5597   tree_predicted_by_p,		/* predicted_by_p  */
5598   tree_can_duplicate_bb_p,	/* can_duplicate_block_p  */
5599   tree_duplicate_bb,		/* duplicate_block  */
5600   tree_split_edge,		/* split_edge  */
5601   tree_make_forwarder_block,	/* make_forwarder_block  */
5602   NULL,				/* tidy_fallthru_edge  */
5603   tree_block_ends_with_call_p,	/* block_ends_with_call_p */
5604   tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5605   tree_flow_call_edges_add,     /* flow_call_edges_add */
5606   tree_execute_on_growing_pred,	/* execute_on_growing_pred */
5607   tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
5608   tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
5609   tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5610   tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
5611   extract_true_false_edges_from_block, /* extract_cond_bb_edges */
5612   flush_pending_stmts		/* flush_pending_stmts */
5613 };
5614 
5615 
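/* Illustrative sketch, not part of the original file: generic CFG code
   dispatches through a table like the one above.  Assuming
   tree_register_cfg_hooks from cfghooks.h selects this table, the generic
   entry points then land in the tree_* implementations listed here,
   e.g. split_edge ends up in tree_split_edge.  */

static basic_block
split_edge_via_tree_hooks (edge e)
{
  tree_register_cfg_hooks ();
  return split_edge (e);
}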
5616 /* Split all critical edges.  */
5617 
5618 static unsigned int
5619 split_critical_edges (void)
5620 {
5621   basic_block bb;
5622   edge e;
5623   edge_iterator ei;
5624 
5625   /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5626      expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
5627      mappings around the calls to split_edge.  */
5628   start_recording_case_labels ();
5629   FOR_ALL_BB (bb)
5630     {
5631       FOR_EACH_EDGE (e, ei, bb->succs)
5632 	if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5633 	  {
5634 	    split_edge (e);
5635 	  }
5636     }
5637   end_recording_case_labels ();
5638   return 0;
5639 }
5640 
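/* Illustrative sketch, not part of the original file: an edge is critical
   when its source has several successors and its destination has several
   predecessors, so no code can be inserted on it without affecting some
   other path.  A hand-rolled equivalent of the EDGE_CRITICAL_P test used
   above (the real macro is assumed to live in basic-block.h) would be:  */

static bool
edge_is_critical_p (edge e)
{
  return EDGE_COUNT (e->src->succs) >= 2
         && EDGE_COUNT (e->dest->preds) >= 2;
}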
5641 struct tree_opt_pass pass_split_crit_edges =
5642 {
5643   "crited",                          /* name */
5644   NULL,                          /* gate */
5645   split_critical_edges,          /* execute */
5646   NULL,                          /* sub */
5647   NULL,                          /* next */
5648   0,                             /* static_pass_number */
5649   TV_TREE_SPLIT_EDGES,           /* tv_id */
5650   PROP_cfg,                      /* properties_required */
5651   PROP_no_crit_edges,            /* properties_provided */
5652   0,                             /* properties_destroyed */
5653   0,                             /* todo_flags_start */
5654   TODO_dump_func,                /* todo_flags_finish */
5655   0                              /* letter */
5656 };
5657 
5658 
5659 /* Return EXP if it is a valid GIMPLE rvalue.  Otherwise gimplify it
5660    into a temporary, making sure to register the temporary for renaming
5661    if necessary, and finally return the temporary.  Put the statements
5662    that compute EXP before the current statement in BSI.  */
5663 
5664 tree
5665 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5666 {
5667   tree t, new_stmt, orig_stmt;
5668 
5669   if (is_gimple_val (exp))
5670     return exp;
5671 
5672   t = make_rename_temp (type, NULL);
5673   new_stmt = build2 (MODIFY_EXPR, type, t, exp);
5674 
5675   orig_stmt = bsi_stmt (*bsi);
5676   SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5677   TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5678 
5679   bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5680   if (in_ssa_p)
5681     mark_new_vars_to_rename (new_stmt);
5682 
5683   return t;
5684 }
5685 
5686 /* Build a ternary operation and gimplify it.  Emit code before BSI.
5687    Return the gimple_val holding the result.  */
5688 
5689 tree
5690 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5691 		 tree type, tree a, tree b, tree c)
5692 {
5693   tree ret;
5694 
5695   ret = fold_build3 (code, type, a, b, c);
5696   STRIP_NOPS (ret);
5697 
5698   return gimplify_val (bsi, type, ret);
5699 }
5700 
5701 /* Build a binary operation and gimplify it.  Emit code before BSI.
5702    Return the gimple_val holding the result.  */
5703 
5704 tree
5705 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5706 		 tree type, tree a, tree b)
5707 {
5708   tree ret;
5709 
5710   ret = fold_build2 (code, type, a, b);
5711   STRIP_NOPS (ret);
5712 
5713   return gimplify_val (bsi, type, ret);
5714 }
5715 
5716 /* Build a unary operation and gimplify it.  Emit code before BSI.
5717    Return the gimple_val holding the result.  */
5718 
5719 tree
5720 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5721 		 tree a)
5722 {
5723   tree ret;
5724 
5725   ret = fold_build1 (code, type, a);
5726   STRIP_NOPS (ret);
5727 
5728   return gimplify_val (bsi, type, ret);
5729 }
5730 
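/* Illustrative sketch, not part of the original file: gimplify_build1/2/3
   let a pass build a folded expression and get back a GIMPLE value, with
   gimplify_val inserting any temporary's defining statement before BSI.
   A hypothetical helper computing (A + B) * C:  */

static tree
build_sum_times_c (block_stmt_iterator *bsi, tree type,
                   tree a, tree b, tree c)
{
  tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);

  return gimplify_build2 (bsi, MULT_EXPR, type, sum, c);
}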
5731 
5732 
5733 /* Emit return warnings.  */
5734 
5735 static unsigned int
5736 execute_warn_function_return (void)
5737 {
5738 #ifdef USE_MAPPED_LOCATION
5739   source_location location;
5740 #else
5741   location_t *locus;
5742 #endif
5743   tree last;
5744   edge e;
5745   edge_iterator ei;
5746 
5747   /* If we have a path to EXIT, then we do return.  */
5748   if (TREE_THIS_VOLATILE (cfun->decl)
5749       && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5750     {
5751 #ifdef USE_MAPPED_LOCATION
5752       location = UNKNOWN_LOCATION;
5753 #else
5754       locus = NULL;
5755 #endif
5756       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5757 	{
5758 	  last = last_stmt (e->src);
5759 	  if (TREE_CODE (last) == RETURN_EXPR
5760 #ifdef USE_MAPPED_LOCATION
5761 	      && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5762 #else
5763 	      && (locus = EXPR_LOCUS (last)) != NULL)
5764 #endif
5765 	    break;
5766 	}
5767 #ifdef USE_MAPPED_LOCATION
5768       if (location == UNKNOWN_LOCATION)
5769 	location = cfun->function_end_locus;
5770       warning (0, "%H%<noreturn%> function does return", &location);
5771 #else
5772       if (!locus)
5773 	locus = &cfun->function_end_locus;
5774       warning (0, "%H%<noreturn%> function does return", locus);
5775 #endif
5776     }
5777 
5778   /* If we see "return;" in some basic block, then we do reach the end
5779      without returning a value.  */
5780   else if (warn_return_type
5781 	   && !TREE_NO_WARNING (cfun->decl)
5782 	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5783 	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5784     {
5785       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5786 	{
5787 	  tree last = last_stmt (e->src);
5788 	  if (TREE_CODE (last) == RETURN_EXPR
5789 	      && TREE_OPERAND (last, 0) == NULL
5790 	      && !TREE_NO_WARNING (last))
5791 	    {
5792 #ifdef USE_MAPPED_LOCATION
5793 	      location = EXPR_LOCATION (last);
5794 	      if (location == UNKNOWN_LOCATION)
5795 		  location = cfun->function_end_locus;
5796 	      warning (0, "%Hcontrol reaches end of non-void function", &location);
5797 #else
5798 	      locus = EXPR_LOCUS (last);
5799 	      if (!locus)
5800 		locus = &cfun->function_end_locus;
5801 	      warning (0, "%Hcontrol reaches end of non-void function", locus);
5802 #endif
5803 	      TREE_NO_WARNING (cfun->decl) = 1;
5804 	      break;
5805 	    }
5806 	}
5807     }
5808   return 0;
5809 }
5810 
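/* Worked example, not part of the original file: illustrative source
   forms that trigger the two warnings above.

       void fatal (int) __attribute__ ((noreturn));
       void fatal (int code) { if (code) __builtin_exit (code); }
           -> "'noreturn' function does return": when CODE is zero a path
              reaches EXIT, and TREE_THIS_VOLATILE is set on the
              FUNCTION_DECL by the noreturn attribute.

       int f (int x) { if (x) return 1; }
           -> "control reaches end of non-void function", guarded by
              warn_return_type (-Wreturn-type).  */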
5811 
5812 /* Given a basic block B which ends with a conditional and has
5813    precisely two successors, determine which of the edges is taken if
5814    the conditional is true and which is taken if the conditional is
5815    false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
5816 
5817 void
5818 extract_true_false_edges_from_block (basic_block b,
5819 				     edge *true_edge,
5820 				     edge *false_edge)
5821 {
5822   edge e = EDGE_SUCC (b, 0);
5823 
5824   if (e->flags & EDGE_TRUE_VALUE)
5825     {
5826       *true_edge = e;
5827       *false_edge = EDGE_SUCC (b, 1);
5828     }
5829   else
5830     {
5831       *false_edge = e;
5832       *true_edge = EDGE_SUCC (b, 1);
5833     }
5834 }
5835 
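/* Illustrative sketch, not part of the original file: a hypothetical
   helper showing the calling convention.  B must end in a conditional
   and have exactly two successors, as the comment above requires.  */

static basic_block
then_clause_destination (basic_block b)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (b, &true_edge, &false_edge);

  return true_edge->dest;
}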
5836 struct tree_opt_pass pass_warn_function_return =
5837 {
5838   NULL,					/* name */
5839   NULL,					/* gate */
5840   execute_warn_function_return,		/* execute */
5841   NULL,					/* sub */
5842   NULL,					/* next */
5843   0,					/* static_pass_number */
5844   0,					/* tv_id */
5845   PROP_cfg,				/* properties_required */
5846   0,					/* properties_provided */
5847   0,					/* properties_destroyed */
5848   0,					/* todo_flags_start */
5849   0,					/* todo_flags_finish */
5850   0					/* letter */
5851 };
5852 
5853 /* Emit noreturn warnings.  */
5854 
5855 static unsigned int
5856 execute_warn_function_noreturn (void)
5857 {
5858   if (warn_missing_noreturn
5859       && !TREE_THIS_VOLATILE (cfun->decl)
5860       && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5861       && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5862     warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5863 	     "for attribute %<noreturn%>",
5864 	     cfun->decl);
5865   return 0;
5866 }
5867 
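/* Worked example, not part of the original file: with -Wmissing-noreturn,
   a function whose CFG has no edge into EXIT_BLOCK_PTR, e.g.

       void spin (void) { for (;;) ; }

   triggers "function might be possible candidate for attribute 'noreturn'"
   unless it is already declared noreturn (TREE_THIS_VOLATILE) or the
   language hook missing_noreturn_ok_p vetoes it.  */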
5868 struct tree_opt_pass pass_warn_function_noreturn =
5869 {
5870   NULL,					/* name */
5871   NULL,					/* gate */
5872   execute_warn_function_noreturn,	/* execute */
5873   NULL,					/* sub */
5874   NULL,					/* next */
5875   0,					/* static_pass_number */
5876   0,					/* tv_id */
5877   PROP_cfg,				/* properties_required */
5878   0,					/* properties_provided */
5879   0,					/* properties_destroyed */
5880   0,					/* todo_flags_start */
5881   0,					/* todo_flags_finish */
5882   0					/* letter */
5883 };
5884