1 /* Control flow functions for trees.
2    Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3    Contributed by Diego Novillo <dnovillo@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "hashtab.h"
47 #include "tree-ssa-propagate.h"
48 
49 /* This file contains functions for building the Control Flow Graph (CFG)
50    for a function tree.  */
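
/* Roughly, and only as an orientation sketch (see build_tree_cfg below for
   the authoritative sequence): make_blocks first partitions the GIMPLE
   statement list into basic blocks, computed gotos are optionally factored,
   dead labels are cleaned up and case labels grouped, and make_edges then
   connects the blocks with control flow edges.  */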
51 
52 /* Local declarations.  */
53 
54 /* Initial capacity for the basic block array.  */
55 static const int initial_cfg_capacity = 20;
56 
57 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58    which use a particular edge.  The CASE_LABEL_EXPRs are chained together
59    via their TREE_CHAIN field, which we clear after we're done with the
60    hash table to prevent problems with duplication of SWITCH_EXPRs.
61 
62    Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63    update the case vector in response to edge redirections.
64 
65    Right now this table is set up and torn down at key points in the
66    compilation process.  It would be nice if we could make the table
67    more persistent.  The key is getting notification of changes to
68    the CFG (particularly edge removal, creation and redirection).  */
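
/* As a rough illustration (the labels below are invented), for a switch
   whose "case 1:", "case 2:" and "default:" labels all lead to the same
   destination block, and therefore share the same outgoing edge E, the
   table entry for E chains the three CASE_LABEL_EXPRs (in no particular
   order) through their TREE_CHAIN fields:

     case 1  ->  case 2  ->  default  ->  NULL_TREE  */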
69 
70 struct edge_to_cases_elt
71 {
72   /* The edge itself.  Necessary for hashing and equality tests.  */
73   edge e;
74 
75   /* The case labels associated with this edge.  We link these up via
76      their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77      when we destroy the hash table.  This prevents problems when copying
78      SWITCH_EXPRs.  */
79   tree case_labels;
80 };
81 
82 static htab_t edge_to_cases;
83 
84 /* CFG statistics.  */
85 struct cfg_stats_d
86 {
87   long num_merged_labels;
88 };
89 
90 static struct cfg_stats_d cfg_stats;
91 
92 /* Nonzero if we found a computed goto while building basic blocks.  */
93 static bool found_computed_goto;
94 
95 /* Basic blocks and flowgraphs.  */
96 static basic_block create_bb (void *, void *, basic_block);
97 static void make_blocks (tree);
98 static void factor_computed_gotos (void);
99 
100 /* Edges.  */
101 static void make_edges (void);
102 static void make_ctrl_stmt_edges (basic_block);
103 static void make_exit_edges (basic_block);
104 static void make_cond_expr_edges (basic_block);
105 static void make_switch_expr_edges (basic_block);
106 static void make_goto_expr_edges (basic_block);
107 static edge tree_redirect_edge_and_branch (edge, basic_block);
108 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
109 static void split_critical_edges (void);
110 
111 /* Various helpers.  */
112 static inline bool stmt_starts_bb_p (tree, tree);
113 static int tree_verify_flow_info (void);
114 static void tree_make_forwarder_block (edge);
115 static void tree_cfg2vcg (FILE *);
116 
117 /* Flowgraph optimization and cleanup.  */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
125 
126 void
127 init_empty_tree_cfg (void)
128 {
129   /* Initialize the basic block array.  */
130   init_flow ();
131   profile_status = PROFILE_ABSENT;
132   n_basic_blocks = 0;
133   last_basic_block = 0;
134   VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
135 
136   /* Build a mapping of labels to their associated blocks.  */
137   VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
138 		  "label to block map");
139 
140   ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
141   EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
142 }
143 
144 /*---------------------------------------------------------------------------
145 			      Create basic blocks
146 ---------------------------------------------------------------------------*/
147 
148 /* Entry point to the CFG builder for trees.  TP points to the list of
149    statements to be added to the flowgraph.  */
150 
151 static void
152 build_tree_cfg (tree *tp)
153 {
154   /* Register specific tree functions.  */
155   tree_register_cfg_hooks ();
156 
157   memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
158 
159   init_empty_tree_cfg ();
160 
161   found_computed_goto = 0;
162   make_blocks (*tp);
163 
164   /* Computed gotos are hell to deal with, especially if there are
165      lots of them with a large number of destinations.  So we factor
166      them to a common computed goto location before we build the
167      edge list.  After we convert back to normal form, we will un-factor
168      the computed gotos since factoring introduces an unwanted jump.  */
169   if (found_computed_goto)
170     factor_computed_gotos ();
171 
172   /* Make sure there is always at least one block, even if it's empty.  */
173   if (n_basic_blocks == 0)
174     create_empty_bb (ENTRY_BLOCK_PTR);
175 
176   /* Adjust the size of the array.  */
177   VARRAY_GROW (basic_block_info, n_basic_blocks);
178 
179   /* To speed up statement iterator walks, we first purge dead labels.  */
180   cleanup_dead_labels ();
181 
182   /* Group case nodes to reduce the number of edges.
183      We do this after cleaning up dead labels because otherwise we miss
184      a lot of obvious case merging opportunities.  */
185   group_case_labels ();
186 
187   /* Create the edges of the flowgraph.  */
188   make_edges ();
189 
190   /* Debugging dumps.  */
191 
192   /* Write the flowgraph to a VCG file.  */
193   {
194     int local_dump_flags;
195     FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
196     if (dump_file)
197       {
198 	tree_cfg2vcg (dump_file);
199 	dump_end (TDI_vcg, dump_file);
200       }
201   }
202 
203 #ifdef ENABLE_CHECKING
204   verify_stmts ();
205 #endif
206 
207   /* Dump a textual representation of the flowgraph.  */
208   if (dump_file)
209     dump_tree_cfg (dump_file, dump_flags);
210 }
211 
212 static void
213 execute_build_cfg (void)
214 {
215   build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
216 }
217 
218 struct tree_opt_pass pass_build_cfg =
219 {
220   "cfg",				/* name */
221   NULL,					/* gate */
222   execute_build_cfg,			/* execute */
223   NULL,					/* sub */
224   NULL,					/* next */
225   0,					/* static_pass_number */
226   TV_TREE_CFG,				/* tv_id */
227   PROP_gimple_leh,			/* properties_required */
228   PROP_cfg,				/* properties_provided */
229   0,					/* properties_destroyed */
230   0,					/* todo_flags_start */
231   TODO_verify_stmts,			/* todo_flags_finish */
232   0					/* letter */
233 };
234 
235 /* Search the CFG for any computed gotos.  If found, factor them to a
236    common computed goto site.  Also record the location of that site so
237    that we can un-factor the gotos after we have converted back to
238    normal form.  */
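
/* For illustration only; block names are invented and "gotovar" is the
   temporary created below:

     before factoring:                 after factoring:

       bb1:  goto *p1;                   bb1:  gotovar = p1;  goto <factored>;
       bb2:  goto *p2;                   bb2:  gotovar = p2;  goto <factored>;
                                         <factored>:  goto *gotovar;

   Only the single factored block then needs edges to every possible
   destination label.  */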
239 
240 static void
241 factor_computed_gotos (void)
242 {
243   basic_block bb;
244   tree factored_label_decl = NULL;
245   tree var = NULL;
246   tree factored_computed_goto_label = NULL;
247   tree factored_computed_goto = NULL;
248 
249   /* We know there are one or more computed gotos in this function.
250      Examine the last statement in each basic block to see if the block
251      ends with a computed goto.  */
252 
253   FOR_EACH_BB (bb)
254     {
255       block_stmt_iterator bsi = bsi_last (bb);
256       tree last;
257 
258       if (bsi_end_p (bsi))
259 	continue;
260       last = bsi_stmt (bsi);
261 
262       /* Ignore the computed goto we create when we factor the original
263 	 computed gotos.  */
264       if (last == factored_computed_goto)
265 	continue;
266 
267       /* If the last statement is a computed goto, factor it.  */
268       if (computed_goto_p (last))
269 	{
270 	  tree assignment;
271 
272 	  /* The first time we find a computed goto we need to create
273 	     the factored goto block and the variable each original
274 	     computed goto will use for their goto destination.  */
275 	  if (! factored_computed_goto)
276 	    {
277 	      basic_block new_bb = create_empty_bb (bb);
278 	      block_stmt_iterator new_bsi = bsi_start (new_bb);
279 
280 	      /* Create the destination of the factored goto.  Each original
281 		 computed goto will put its desired destination into this
282 		 variable and jump to the label we create immediately
283 		 below.  */
284 	      var = create_tmp_var (ptr_type_node, "gotovar");
285 
286 	      /* Build a label for the new block which will contain the
287 		 factored computed goto.  */
288 	      factored_label_decl = create_artificial_label ();
289 	      factored_computed_goto_label
290 		= build1 (LABEL_EXPR, void_type_node, factored_label_decl);
291 	      bsi_insert_after (&new_bsi, factored_computed_goto_label,
292 				BSI_NEW_STMT);
293 
294 	      /* Build our new computed goto.  */
295 	      factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
296 	      bsi_insert_after (&new_bsi, factored_computed_goto,
297 				BSI_NEW_STMT);
298 	    }
299 
300 	  /* Copy the original computed goto's destination into VAR.  */
301 	  assignment = build (MODIFY_EXPR, ptr_type_node,
302 			      var, GOTO_DESTINATION (last));
303 	  bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
304 
305 	  /* And re-vector the computed goto to the new destination.  */
306 	  GOTO_DESTINATION (last) = factored_label_decl;
307 	}
308     }
309 }
310 
311 
312 /* Build a flowgraph for the statement_list STMT_LIST.  */
313 
314 static void
315 make_blocks (tree stmt_list)
316 {
317   tree_stmt_iterator i = tsi_start (stmt_list);
318   tree stmt = NULL;
319   bool start_new_block = true;
320   bool first_stmt_of_list = true;
321   basic_block bb = ENTRY_BLOCK_PTR;
322 
323   while (!tsi_end_p (i))
324     {
325       tree prev_stmt;
326 
327       prev_stmt = stmt;
328       stmt = tsi_stmt (i);
329 
330       /* If the statement starts a new basic block or if we have determined
331 	 in a previous pass that we need to create a new block for STMT, do
332 	 so now.  */
333       if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
334 	{
335 	  if (!first_stmt_of_list)
336 	    stmt_list = tsi_split_statement_list_before (&i);
337 	  bb = create_basic_block (stmt_list, NULL, bb);
338 	  start_new_block = false;
339 	}
340 
341       /* Now add STMT to BB and create the subgraphs for special statement
342 	 codes.  */
343       set_bb_for_stmt (stmt, bb);
344 
345       if (computed_goto_p (stmt))
346 	found_computed_goto = true;
347 
348       /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
349 	 next iteration.  */
350       if (stmt_ends_bb_p (stmt))
351 	start_new_block = true;
352 
353       tsi_next (&i);
354       first_stmt_of_list = false;
355     }
356 }
357 
358 
359 /* Create and return a new empty basic block after bb AFTER.  */
360 
361 static basic_block
362 create_bb (void *h, void *e, basic_block after)
363 {
364   basic_block bb;
365 
366   gcc_assert (!e);
367 
368   /* Create and initialize a new basic block.  Since alloc_block uses
369      ggc_alloc_cleared to allocate a basic block, we do not have to
370      clear the newly allocated basic block here.  */
371   bb = alloc_block ();
372 
373   bb->index = last_basic_block;
374   bb->flags = BB_NEW;
375   bb->stmt_list = h ? h : alloc_stmt_list ();
376 
377   /* Add the new block to the linked list of blocks.  */
378   link_block (bb, after);
379 
380   /* Grow the basic block array if needed.  */
381   if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
382     {
383       size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
384       VARRAY_GROW (basic_block_info, new_size);
385     }
386 
387   /* Add the newly created block to the array.  */
388   BASIC_BLOCK (last_basic_block) = bb;
389 
390   n_basic_blocks++;
391   last_basic_block++;
392 
393   return bb;
394 }
395 
396 
397 /*---------------------------------------------------------------------------
398 				 Edge creation
399 ---------------------------------------------------------------------------*/
400 
401 /* Fold COND_EXPR_COND of each COND_EXPR.  */
402 
403 void
404 fold_cond_expr_cond (void)
405 {
406   basic_block bb;
407 
408   FOR_EACH_BB (bb)
409     {
410       tree stmt = last_stmt (bb);
411 
412       if (stmt
413 	  && TREE_CODE (stmt) == COND_EXPR)
414 	{
415 	  tree cond = fold (COND_EXPR_COND (stmt));
416 	  if (integer_zerop (cond))
417 	    COND_EXPR_COND (stmt) = boolean_false_node;
418 	  else if (integer_onep (cond))
419 	    COND_EXPR_COND (stmt) = boolean_true_node;
420 	}
421     }
422 }
423 
424 /* Join all the blocks in the flowgraph.  */
425 
426 static void
427 make_edges (void)
428 {
429   basic_block bb;
430 
431   /* Create an edge from entry to the first block with executable
432      statements in it.  */
433   make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
434 
435   /* Traverse the basic block array placing edges.  */
436   FOR_EACH_BB (bb)
437     {
438       tree first = first_stmt (bb);
439       tree last = last_stmt (bb);
440 
441       if (first)
442 	{
443 	  /* Edges for statements that always alter flow control.  */
444 	  if (is_ctrl_stmt (last))
445 	    make_ctrl_stmt_edges (bb);
446 
447 	  /* Edges for statements that sometimes alter flow control.  */
448 	  if (is_ctrl_altering_stmt (last))
449 	    make_exit_edges (bb);
450 	}
451 
452       /* Finally, if no edges were created above, this is a regular
453 	 basic block that only needs a fallthru edge.  */
454       if (EDGE_COUNT (bb->succs) == 0)
455 	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
456     }
457 
458   /* We do not care about fake edges, so remove any that the CFG
459      builder inserted for completeness.  */
460   remove_fake_exit_edges ();
461 
462   /* Fold COND_EXPR_COND of each COND_EXPR.  */
463   fold_cond_expr_cond ();
464 
465   /* Clean up the graph and warn for unreachable code.  */
466   cleanup_tree_cfg ();
467 }
468 
469 
470 /* Create edges for control statement at basic block BB.  */
471 
472 static void
473 make_ctrl_stmt_edges (basic_block bb)
474 {
475   tree last = last_stmt (bb);
476 
477   gcc_assert (last);
478   switch (TREE_CODE (last))
479     {
480     case GOTO_EXPR:
481       make_goto_expr_edges (bb);
482       break;
483 
484     case RETURN_EXPR:
485       make_edge (bb, EXIT_BLOCK_PTR, 0);
486       break;
487 
488     case COND_EXPR:
489       make_cond_expr_edges (bb);
490       break;
491 
492     case SWITCH_EXPR:
493       make_switch_expr_edges (bb);
494       break;
495 
496     case RESX_EXPR:
497       make_eh_edges (last);
498       /* Yet another NORETURN hack.  */
499       if (EDGE_COUNT (bb->succs) == 0)
500 	make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
501       break;
502 
503     default:
504       gcc_unreachable ();
505     }
506 }
507 
508 
509 /* Create exit edges for statements in block BB that alter the flow of
510    control.  Statements that alter the control flow are 'goto', 'return'
511    and calls to non-returning functions.  */
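
/* For instance (an invented example), a block ending in a call to a
   noreturn function such as abort () gets only a fake edge to EXIT,
   while an ordinary call that might throw gets its EH edges plus a
   fallthru edge to the next block.  */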
512 
513 static void
514 make_exit_edges (basic_block bb)
515 {
516   tree last = last_stmt (bb), op;
517 
518   gcc_assert (last);
519   switch (TREE_CODE (last))
520     {
521     case RESX_EXPR:
522       break;
523     case CALL_EXPR:
524       /* If this function receives a nonlocal goto, then we need to
525 	 make edges from this call site to all the nonlocal goto
526 	 handlers.  */
527       if (TREE_SIDE_EFFECTS (last)
528 	  && current_function_has_nonlocal_label)
529 	make_goto_expr_edges (bb);
530 
531       /* If this statement has reachable exception handlers, then
532 	 create abnormal edges to them.  */
533       make_eh_edges (last);
534 
535       /* Some calls are known not to return.  For such calls we create
536 	 a fake edge.
537 
538 	 We really need to revamp how we build edges so that it's not
539 	 such a bloody pain to avoid creating edges for this case since
540 	 all we do is remove these edges when we're done building the
541 	 CFG.  */
542       if (call_expr_flags (last) & ECF_NORETURN)
543 	{
544 	  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
545 	  return;
546 	}
547 
548       /* Don't forget the fall-thru edge.  */
549       make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
550       break;
551 
552     case MODIFY_EXPR:
553       /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
554 	 may have an abnormal edge.  Search the RHS for this case and
555 	 create any required edges.  */
556       op = get_call_expr_in (last);
557       if (op && TREE_SIDE_EFFECTS (op)
558 	  && current_function_has_nonlocal_label)
559 	make_goto_expr_edges (bb);
560 
561       make_eh_edges (last);
562       make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
563       break;
564 
565     default:
566       gcc_unreachable ();
567     }
568 }
569 
570 
571 /* Create the edges for a COND_EXPR starting at block BB.
572    At this point, both clauses must contain only simple gotos.  */
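
/* Concretely, the COND_EXPR handled here has already been lowered to the
   form (label names invented for the example)

     if (cond) goto <L1>; else goto <L2>;

   so the true edge goes to the block holding <L1> and the false edge to
   the block holding <L2>.  */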
573 
574 static void
575 make_cond_expr_edges (basic_block bb)
576 {
577   tree entry = last_stmt (bb);
578   basic_block then_bb, else_bb;
579   tree then_label, else_label;
580   edge e;
581 
582   gcc_assert (entry);
583   gcc_assert (TREE_CODE (entry) == COND_EXPR);
584 
585   /* Entry basic blocks for each component.  */
586   then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
587   else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
588   then_bb = label_to_block (then_label);
589   else_bb = label_to_block (else_label);
590 
591   e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
592 #ifdef USE_MAPPED_LOCATION
593   e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
594 #else
595   e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
596 #endif
597   e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
598   if (e)
599     {
600 #ifdef USE_MAPPED_LOCATION
601       e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
602 #else
603       e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
604 #endif
605     }
606 }
607 
608 /* Hashing routine for EDGE_TO_CASES.  */
609 
610 static hashval_t
611 edge_to_cases_hash (const void *p)
612 {
613   edge e = ((struct edge_to_cases_elt *)p)->e;
614 
615   /* Hash on the edge itself (which is a pointer).  */
616   return htab_hash_pointer (e);
617 }
618 
619 /* Equality routine for EDGE_TO_CASES.  Edges are unique, so testing
620    for equality is just a pointer comparison.  */
621 
622 static int
623 edge_to_cases_eq (const void *p1, const void *p2)
624 {
625   edge e1 = ((struct edge_to_cases_elt *)p1)->e;
626   edge e2 = ((struct edge_to_cases_elt *)p2)->e;
627 
628   return e1 == e2;
629 }
630 
631 /* Called for each element in the hash table (P) as we delete the
632    edge to cases hash table.
633 
634    Clear all the TREE_CHAINs to prevent problems with copying of
635    SWITCH_EXPRs and structure sharing rules, then free the hash table
636    element.  */
637 
638 static void
639 edge_to_cases_cleanup (void *p)
640 {
641   struct edge_to_cases_elt *elt = p;
642   tree t, next;
643 
644   for (t = elt->case_labels; t; t = next)
645     {
646       next = TREE_CHAIN (t);
647       TREE_CHAIN (t) = NULL;
648     }
649   free (p);
650 }
651 
652 /* Start recording information mapping edges to case labels.  */
653 
654 void
655 start_recording_case_labels (void)
656 {
657   gcc_assert (edge_to_cases == NULL);
658 
659   edge_to_cases = htab_create (37,
660 			       edge_to_cases_hash,
661 			       edge_to_cases_eq,
662 			       edge_to_cases_cleanup);
663 }
664 
665 /* Return nonzero if we are recording information for case labels.  */
666 
667 static bool
668 recording_case_labels_p (void)
669 {
670   return (edge_to_cases != NULL);
671 }
672 
673 /* Stop recording information mapping edges to case labels and
674    remove any information we have recorded.  */
675 void
676 end_recording_case_labels (void)
677 {
678   htab_delete (edge_to_cases);
679   edge_to_cases = NULL;
680 }
681 
682 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E.  */
683 
684 static void
685 record_switch_edge (edge e, tree case_label)
686 {
687   struct edge_to_cases_elt *elt;
688   void **slot;
689 
690   /* Build a hash table element so we can see if E is already
691      in the table.  */
692   elt = xmalloc (sizeof (struct edge_to_cases_elt));
693   elt->e = e;
694   elt->case_labels = case_label;
695 
696   slot = htab_find_slot (edge_to_cases, elt, INSERT);
697 
698   if (*slot == NULL)
699     {
700       /* E was not in the hash table.  Install E into the hash table.  */
701       *slot = (void *)elt;
702     }
703   else
704     {
705       /* E was already in the hash table.  Free ELT as we do not need it
706 	 anymore.  */
707       free (elt);
708 
709       /* Get the entry stored in the hash table.  */
710       elt = (struct edge_to_cases_elt *) *slot;
711 
712       /* Add it to the chain of CASE_LABEL_EXPRs referencing E.  */
713       TREE_CHAIN (case_label) = elt->case_labels;
714       elt->case_labels = case_label;
715     }
716 }
717 
718 /* If we are inside a {start,end}_recording_cases block, then return
719    a chain of CASE_LABEL_EXPRs from T which reference E.
720 
721    Otherwise return NULL.  */
722 
723 static tree
724 get_cases_for_edge (edge e, tree t)
725 {
726   struct edge_to_cases_elt elt, *elt_p;
727   void **slot;
728   size_t i, n;
729   tree vec;
730 
731   /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
732      chains available.  Return NULL so the caller can detect this case.  */
733   if (!recording_case_labels_p ())
734     return NULL;
735 
736 restart:
737   elt.e = e;
738   elt.case_labels = NULL;
739   slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
740 
741   if (slot)
742     {
743       elt_p = (struct edge_to_cases_elt *)*slot;
744       return elt_p->case_labels;
745     }
746 
747   /* If we did not find E in the hash table, then this must be the first
748      time we have been queried for information about E & T.  Add all the
749      elements from T to the hash table then perform the query again.  */
750 
751   vec = SWITCH_LABELS (t);
752   n = TREE_VEC_LENGTH (vec);
753   for (i = 0; i < n; i++)
754     {
755       tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
756       basic_block label_bb = label_to_block (lab);
757       record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
758     }
759   goto restart;
760 }
761 
762 /* Create the edges for a SWITCH_EXPR starting at block BB.
763    At this point, the switch body has been lowered and the
764    SWITCH_LABELS filled in, so this is in effect a multi-way branch.  */
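
/* For example (a hand-written sketch, not taken from any test case), the
   lowered switch

     switch (x) { case 1: goto <L1>; case 2: goto <L2>; default: goto <L3>; }

   simply gets one edge from BB to each of the blocks holding <L1>, <L2>
   and <L3>.  */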
765 
766 static void
767 make_switch_expr_edges (basic_block bb)
768 {
769   tree entry = last_stmt (bb);
770   size_t i, n;
771   tree vec;
772 
773   vec = SWITCH_LABELS (entry);
774   n = TREE_VEC_LENGTH (vec);
775 
776   for (i = 0; i < n; ++i)
777     {
778       tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
779       basic_block label_bb = label_to_block (lab);
780       make_edge (bb, label_bb, 0);
781     }
782 }
783 
784 
785 /* Return the basic block holding label DEST.  */
786 
787 basic_block
788 label_to_block_fn (struct function *ifun, tree dest)
789 {
790   int uid = LABEL_DECL_UID (dest);
791 
792   /* We would die hard when faced with an undefined label.  Emit a label to
793      the very first basic block.  This will hopefully make even the dataflow
794      and undefined variable warnings quite right.  */
795   if ((errorcount || sorrycount) && uid < 0)
796     {
797       block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
798       tree stmt;
799 
800       stmt = build1 (LABEL_EXPR, void_type_node, dest);
801       bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
802       uid = LABEL_DECL_UID (dest);
803     }
804   if (VARRAY_SIZE (ifun->cfg->x_label_to_block_map) <= (unsigned int)uid)
805     return NULL;
806   return VARRAY_BB (ifun->cfg->x_label_to_block_map, uid);
807 }
808 
809 /* Create edges for a goto statement at block BB.  */
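
/* In short: a simple "goto <L>" produces a single fallthru edge to the
   block holding <L> and the GOTO_EXPR itself is removed; a computed
   "goto *p" instead gets an abnormal edge to every block whose label has
   been marked FORCED_LABEL, i.e. every potential target.  */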
810 
811 static void
812 make_goto_expr_edges (basic_block bb)
813 {
814   tree goto_t;
815   basic_block target_bb;
816   int for_call;
817   block_stmt_iterator last = bsi_last (bb);
818 
819   goto_t = bsi_stmt (last);
820 
821   /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
822      CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
823      from a nonlocal goto.  */
824   if (TREE_CODE (goto_t) != GOTO_EXPR)
825     for_call = 1;
826   else
827     {
828       tree dest = GOTO_DESTINATION (goto_t);
829       for_call = 0;
830 
831       /* A GOTO to a local label creates normal edges.  */
832       if (simple_goto_p (goto_t))
833 	{
834 	  edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
835 #ifdef USE_MAPPED_LOCATION
836 	  e->goto_locus = EXPR_LOCATION (goto_t);
837 #else
838 	  e->goto_locus = EXPR_LOCUS (goto_t);
839 #endif
840 	  bsi_remove (&last);
841 	  return;
842 	}
843 
844       /* Nothing more to do for nonlocal gotos.  */
845       if (TREE_CODE (dest) == LABEL_DECL)
846 	return;
847 
848       /* Computed gotos remain.  */
849     }
850 
851   /* Look for the block starting with the destination label.  In the
852      case of a computed goto, make an edge to any label block we find
853      in the CFG.  */
854   FOR_EACH_BB (target_bb)
855     {
856       block_stmt_iterator bsi;
857 
858       for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
859 	{
860 	  tree target = bsi_stmt (bsi);
861 
862 	  if (TREE_CODE (target) != LABEL_EXPR)
863 	    break;
864 
865 	  if (
866 	      /* Computed GOTOs.  Make an edge to every label block that has
867 		 been marked as a potential target for a computed goto.  */
868 	      (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
869 	      /* Nonlocal GOTO target.  Make an edge to every label block
870 		 that has been marked as a potential target for a nonlocal
871 		 goto.  */
872 	      || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
873 	    {
874 	      make_edge (bb, target_bb, EDGE_ABNORMAL);
875 	      break;
876 	    }
877 	}
878     }
879 
880   /* Degenerate case of computed goto with no labels.  */
881   if (!for_call && EDGE_COUNT (bb->succs) == 0)
882     make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
883 }
884 
885 
886 /*---------------------------------------------------------------------------
887 			       Flowgraph analysis
888 ---------------------------------------------------------------------------*/
889 
890 /* Cleanup useless labels in basic blocks.  This is something we wish
891    to do early because it allows us to group case labels before creating
892    the edges for the CFG, and it speeds up block statement iterators in
893    all passes later on.
894    We only run this pass once; running it more than once is probably not
895    profitable.  */
896 
897 /* A map from basic block index to the leading label of that block.  */
898 static tree *label_for_bb;
899 
900 /* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */
901 static void
902 update_eh_label (struct eh_region *region)
903 {
904   tree old_label = get_eh_region_tree_label (region);
905   if (old_label)
906     {
907       tree new_label;
908       basic_block bb = label_to_block (old_label);
909 
910       /* ??? After optimizing, there may be EH regions with labels
911 	 that have already been removed from the function body, so
912 	 there is no basic block for them.  */
913       if (! bb)
914 	return;
915 
916       new_label = label_for_bb[bb->index];
917       set_eh_region_tree_label (region, new_label);
918     }
919 }
920 
921 /* Given LABEL return the first label in the same basic block.  */
922 static tree
923 main_block_label (tree label)
924 {
925   basic_block bb = label_to_block (label);
926 
927   /* label_to_block may have inserted an undefined label into the chain.  */
928   if (!label_for_bb[bb->index])
929     label_for_bb[bb->index] = label;
930   return label_for_bb[bb->index];
931 }
932 
933 /* Cleanup redundant labels.  This is a three-step process:
934      1) Find the leading label for each block.
935      2) Redirect all references to labels to the leading labels.
936      3) Cleanup all useless labels.  */
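
/* A small invented illustration: if a block starts with

     <D1>: <D2>: L:  x = 1;

   where <D1> and <D2> are artificial and L is user-defined, step 1 picks
   L as the leading label, step 2 rewrites gotos, switch cases and EH
   region labels that mention <D1> or <D2> to use L instead, and step 3
   deletes <D1> and <D2>.  */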
937 
938 void
939 cleanup_dead_labels (void)
940 {
941   basic_block bb;
942   label_for_bb = xcalloc (last_basic_block, sizeof (tree));
943 
944   /* Find a suitable label for each block.  We use the first user-defined
945      label if there is one, or otherwise just the first label we see.  */
946   FOR_EACH_BB (bb)
947     {
948       block_stmt_iterator i;
949 
950       for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
951 	{
952 	  tree label, stmt = bsi_stmt (i);
953 
954 	  if (TREE_CODE (stmt) != LABEL_EXPR)
955 	    break;
956 
957 	  label = LABEL_EXPR_LABEL (stmt);
958 
959 	  /* If we have not yet seen a label for the current block,
960 	     remember this one and see if there are more labels.  */
961 	  if (! label_for_bb[bb->index])
962 	    {
963 	      label_for_bb[bb->index] = label;
964 	      continue;
965 	    }
966 
967 	  /* If we did see a label for the current block already, but it
968 	     is an artificially created label, replace it if the current
969 	     label is a user defined label.  */
970 	  if (! DECL_ARTIFICIAL (label)
971 	      && DECL_ARTIFICIAL (label_for_bb[bb->index]))
972 	    {
973 	      label_for_bb[bb->index] = label;
974 	      break;
975 	    }
976 	}
977     }
978 
979   /* Now redirect all jumps/branches to the selected label.
980      First do so for each block ending in a control statement.  */
981   FOR_EACH_BB (bb)
982     {
983       tree stmt = last_stmt (bb);
984       if (!stmt)
985 	continue;
986 
987       switch (TREE_CODE (stmt))
988 	{
989 	case COND_EXPR:
990 	  {
991 	    tree true_branch, false_branch;
992 
993 	    true_branch = COND_EXPR_THEN (stmt);
994 	    false_branch = COND_EXPR_ELSE (stmt);
995 
996 	    GOTO_DESTINATION (true_branch)
997 	      = main_block_label (GOTO_DESTINATION (true_branch));
998 	    GOTO_DESTINATION (false_branch)
999 	      = main_block_label (GOTO_DESTINATION (false_branch));
1000 
1001 	    break;
1002 	  }
1003 
1004 	case SWITCH_EXPR:
1005 	  {
1006 	    size_t i;
1007 	    tree vec = SWITCH_LABELS (stmt);
1008 	    size_t n = TREE_VEC_LENGTH (vec);
1009 
1010 	    /* Replace all destination labels.  */
1011 	    for (i = 0; i < n; ++i)
1012 	      {
1013 		tree elt = TREE_VEC_ELT (vec, i);
1014 		tree label = main_block_label (CASE_LABEL (elt));
1015 		CASE_LABEL (elt) = label;
1016 	      }
1017 	    break;
1018 	  }
1019 
1020 	/* We have to handle GOTO_EXPRs until they're removed, and we don't
1021 	   remove them until after we've created the CFG edges.  */
1022 	case GOTO_EXPR:
1023           if (! computed_goto_p (stmt))
1024 	    {
1025 	      GOTO_DESTINATION (stmt)
1026 		= main_block_label (GOTO_DESTINATION (stmt));
1027 	      break;
1028 	    }
1029 
1030 	default:
1031 	  break;
1032       }
1033     }
1034 
1035   for_each_eh_region (update_eh_label);
1036 
1037   /* Finally, purge dead labels.  All user-defined labels and labels that
1038      can be the target of non-local gotos and labels which have their
1039      address taken are preserved.  */
1040   FOR_EACH_BB (bb)
1041     {
1042       block_stmt_iterator i;
1043       tree label_for_this_bb = label_for_bb[bb->index];
1044 
1045       if (! label_for_this_bb)
1046 	continue;
1047 
1048       for (i = bsi_start (bb); !bsi_end_p (i); )
1049 	{
1050 	  tree label, stmt = bsi_stmt (i);
1051 
1052 	  if (TREE_CODE (stmt) != LABEL_EXPR)
1053 	    break;
1054 
1055 	  label = LABEL_EXPR_LABEL (stmt);
1056 
1057 	  if (label == label_for_this_bb
1058 	      || ! DECL_ARTIFICIAL (label)
1059 	      || DECL_NONLOCAL (label)
1060 	      || FORCED_LABEL (label))
1061 	    bsi_next (&i);
1062 	  else
1063 	    bsi_remove (&i);
1064 	}
1065     }
1066 
1067   free (label_for_bb);
1068 }
1069 
1070 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1071    and scan the sorted vector of cases.  Combine the ones jumping to the
1072    same label.
1073    Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
1074 
1075 void
1076 group_case_labels (void)
1077 {
1078   basic_block bb;
1079 
1080   FOR_EACH_BB (bb)
1081     {
1082       tree stmt = last_stmt (bb);
1083       if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1084 	{
1085 	  tree labels = SWITCH_LABELS (stmt);
1086 	  int old_size = TREE_VEC_LENGTH (labels);
1087 	  int i, j, new_size = old_size;
1088 	  tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1089  	  tree default_label;
1090 
1091 	  /* The default label is always the last case in a switch
1092 	     statement after gimplification.  */
1093 	  default_label = CASE_LABEL (default_case);
1094 
1095 	  /* Look for possible opportunities to merge cases.
1096 	     Ignore the last element of the label vector because it
1097 	     must be the default case.  */
1098           i = 0;
1099 	  while (i < old_size - 1)
1100 	    {
1101 	      tree base_case, base_label, base_high;
1102 	      base_case = TREE_VEC_ELT (labels, i);
1103 
1104 	      gcc_assert (base_case);
1105 	      base_label = CASE_LABEL (base_case);
1106 
1107 	      /* Discard cases that have the same destination as the
1108 		 default case.  */
1109 	      if (base_label == default_label)
1110 		{
1111 		  TREE_VEC_ELT (labels, i) = NULL_TREE;
1112 		  i++;
1113 		  new_size--;
1114 		  continue;
1115 		}
1116 
1117 	      base_high = CASE_HIGH (base_case) ?
1118 		CASE_HIGH (base_case) : CASE_LOW (base_case);
1119 	      i++;
1120 	      /* Try to merge case labels.  Break out when we reach the end
1121 		 of the label vector or when we cannot merge the next case
1122 		 label with the current one.  */
1123 	      while (i < old_size - 1)
1124 		{
1125 		  tree merge_case = TREE_VEC_ELT (labels, i);
1126 	          tree merge_label = CASE_LABEL (merge_case);
1127 		  tree t = int_const_binop (PLUS_EXPR, base_high,
1128 					    integer_one_node, 1);
1129 
1130 		  /* Merge the cases if they jump to the same place,
1131 		     and their ranges are consecutive.  */
1132 		  if (merge_label == base_label
1133 		      && tree_int_cst_equal (CASE_LOW (merge_case), t))
1134 		    {
1135 		      base_high = CASE_HIGH (merge_case) ?
1136 			CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1137 		      CASE_HIGH (base_case) = base_high;
1138 		      TREE_VEC_ELT (labels, i) = NULL_TREE;
1139 		      new_size--;
1140 		      i++;
1141 		    }
1142 		  else
1143 		    break;
1144 		}
1145 	    }
1146 
1147 	  /* Compress the case labels in the label vector, and adjust the
1148 	     length of the vector.  */
1149 	  for (i = 0, j = 0; i < new_size; i++)
1150 	    {
1151 	      while (! TREE_VEC_ELT (labels, j))
1152 		j++;
1153 	      TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1154 	    }
1155 	  TREE_VEC_LENGTH (labels) = new_size;
1156 	}
1157     }
1158 }
1159 
1160 /* Checks whether we can merge block B into block A.  */
1161 
1162 static bool
1163 tree_can_merge_blocks_p (basic_block a, basic_block b)
1164 {
1165   tree stmt;
1166   block_stmt_iterator bsi;
1167   tree phi;
1168 
1169   if (!single_succ_p (a))
1170     return false;
1171 
1172   if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1173     return false;
1174 
1175   if (single_succ (a) != b)
1176     return false;
1177 
1178   if (!single_pred_p (b))
1179     return false;
1180 
1181   if (b == EXIT_BLOCK_PTR)
1182     return false;
1183 
1184   /* If A ends with a statement causing exceptions or something similar, we
1185      cannot merge the blocks.  */
1186   stmt = last_stmt (a);
1187   if (stmt && stmt_ends_bb_p (stmt))
1188     return false;
1189 
1190   /* Do not allow a block with only a non-local label to be merged.  */
1191   if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1192       && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1193     return false;
1194 
1195   /* It must be possible to eliminate all phi nodes in B.  If ssa form
1196      is not up-to-date, we cannot eliminate any phis.  */
1197   phi = phi_nodes (b);
1198   if (phi)
1199     {
1200       if (need_ssa_update_p ())
1201 	return false;
1202 
1203       for (; phi; phi = PHI_CHAIN (phi))
1204 	if (!is_gimple_reg (PHI_RESULT (phi))
1205 	    && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1206 	  return false;
1207     }
1208 
1209   /* Do not remove user labels.  */
1210   for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1211     {
1212       stmt = bsi_stmt (bsi);
1213       if (TREE_CODE (stmt) != LABEL_EXPR)
1214 	break;
1215       if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1216 	return false;
1217     }
1218 
1219   /* Protect the loop latches.  */
1220   if (current_loops
1221       && b->loop_father->latch == b)
1222     return false;
1223 
1224   return true;
1225 }
1226 
1227 /* Replaces all uses of NAME by VAL.  */
1228 
1229 void
1230 replace_uses_by (tree name, tree val)
1231 {
1232   imm_use_iterator imm_iter;
1233   use_operand_p use;
1234   tree stmt;
1235   edge e;
1236   unsigned i;
1237   VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
1238 
1239   FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
1240     {
1241       stmt = USE_STMT (use);
1242       replace_exp (use, val);
1243 
1244       if (TREE_CODE (stmt) == PHI_NODE)
1245 	{
1246 	  e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1247 	  if (e->flags & EDGE_ABNORMAL)
1248 	    {
1249 	      /* This can only occur for virtual operands, since
1250 		 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1251 		 would prevent replacement.  */
1252 	      gcc_assert (!is_gimple_reg (name));
1253 	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1254 	    }
1255 	}
1256       else
1257 	VEC_safe_push (tree, heap, stmts, stmt);
1258     }
1259 
1260   /* We do not update the statements in the loop above.  Consider
1261      x = w * w;
1262 
1263      If we performed the update in the first loop, the statement
1264      would be rescanned after first occurrence of w is replaced,
1265      the new uses would be placed to the beginning of the list,
1266      and we would never process them.  */
1267   for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
1268     {
1269       tree rhs;
1270 
1271       fold_stmt_inplace (stmt);
1272 
1273       rhs = get_rhs (stmt);
1274       if (TREE_CODE (rhs) == ADDR_EXPR)
1275 	recompute_tree_invarant_for_addr_expr (rhs);
1276 
1277       maybe_clean_or_replace_eh_stmt (stmt, stmt);
1278       mark_new_vars_to_rename (stmt);
1279     }
1280 
1281   VEC_free (tree, heap, stmts);
1282 
1283   /* Also update the trees stored in loop structures.  */
1284   if (current_loops)
1285     {
1286       struct loop *loop;
1287 
1288       for (i = 0; i < current_loops->num; i++)
1289 	{
1290 	  loop = current_loops->parray[i];
1291 	  if (loop)
1292 	    substitute_in_loop_info (loop, name, val);
1293 	}
1294     }
1295 }
1296 
1297 /* Merge block B into block A.  */
1298 
1299 static void
1300 tree_merge_blocks (basic_block a, basic_block b)
1301 {
1302   block_stmt_iterator bsi;
1303   tree_stmt_iterator last;
1304   tree phi;
1305 
1306   if (dump_file)
1307     fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1308 
1309   /* Remove all single-valued PHI nodes from block B of the form
1310      V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
1311   bsi = bsi_last (a);
1312   for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1313     {
1314       tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1315       tree copy;
1316       bool may_replace_uses = may_propagate_copy (def, use);
1317 
1318       /* In case we have loops to care about, do not propagate arguments of
1319 	 loop closed ssa phi nodes.  */
1320       if (current_loops
1321 	  && is_gimple_reg (def)
1322 	  && TREE_CODE (use) == SSA_NAME
1323 	  && a->loop_father != b->loop_father)
1324 	may_replace_uses = false;
1325 
1326       if (!may_replace_uses)
1327 	{
1328 	  gcc_assert (is_gimple_reg (def));
1329 
1330 	  /* Note that just emitting the copies is fine -- there is no problem
1331 	     with ordering of phi nodes.  This is because A is the single
1332 	     predecessor of B, therefore results of the phi nodes cannot
1333 	     appear as arguments of the phi nodes.  */
1334 	  copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1335 	  bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1336 	  SET_PHI_RESULT (phi, NULL_TREE);
1337 	  SSA_NAME_DEF_STMT (def) = copy;
1338 	}
1339       else
1340 	replace_uses_by (def, use);
1341 
1342       remove_phi_node (phi, NULL);
1343     }
1344 
1345   /* Ensure that B follows A.  */
1346   move_block_after (b, a);
1347 
1348   gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1349   gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1350 
1351   /* Remove labels from B and set bb_for_stmt to A for other statements.  */
1352   for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1353     {
1354       if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1355 	{
1356 	  tree label = bsi_stmt (bsi);
1357 
1358 	  bsi_remove (&bsi);
1359 	  /* Now that we can thread computed gotos, we might have
1360 	     a situation where we have a forced label in block B.
1361 	     However, the label at the start of block B might still be
1362 	     used in other ways (think about the runtime checking for
1363 	     Fortran assigned gotos).  So we cannot just delete the
1364 	     label.  Instead we move the label to the start of block A.  */
1365 	  if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1366 	    {
1367 	      block_stmt_iterator dest_bsi = bsi_start (a);
1368 	      bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1369 	    }
1370 	}
1371       else
1372 	{
1373 	  set_bb_for_stmt (bsi_stmt (bsi), a);
1374 	  bsi_next (&bsi);
1375 	}
1376     }
1377 
1378   /* Merge the chains.  */
1379   last = tsi_last (a->stmt_list);
1380   tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1381   b->stmt_list = NULL;
1382 }
1383 
1384 
1385 /* Return the one of the two successors of BB that is not reached by a
1386    complex edge, if there is one.  Else, return BB.  We use
1387    this in optimizations that use post-dominators for their heuristics,
1388    to catch the cases in C++ where function calls are involved.  */
1389 
1390 basic_block
1391 single_noncomplex_succ (basic_block bb)
1392 {
1393   edge e0, e1;
1394   if (EDGE_COUNT (bb->succs) != 2)
1395     return bb;
1396 
1397   e0 = EDGE_SUCC (bb, 0);
1398   e1 = EDGE_SUCC (bb, 1);
1399   if (e0->flags & EDGE_COMPLEX)
1400     return e1->dest;
1401   if (e1->flags & EDGE_COMPLEX)
1402     return e0->dest;
1403 
1404   return bb;
1405 }
1406 
1407 
1408 
1409 /* Walk the function tree removing unnecessary statements.
1410 
1411      * Empty statement nodes are removed
1412 
1413      * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1414 
1415      * Unnecessary COND_EXPRs are removed
1416 
1417      * Some unnecessary BIND_EXPRs are removed
1418 
1419    Clearly more work could be done.  The trick is doing the analysis
1420    and removal fast enough to be a net improvement in compile times.
1421 
1422    Note that when we remove a control structure such as a COND_EXPR
1423    BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1424    to ensure we eliminate all the useless code.  */
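
/* A couple of invented examples of what gets removed here:

     if (0) { foo (); } else { }        -->  removed entirely
     try { x = 1; } finally { }         -->  x = 1;

   (assuming no labels force the dead arm to stay around).  */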
1425 
1426 struct rus_data
1427 {
1428   tree *last_goto;
1429   bool repeat;
1430   bool may_throw;
1431   bool may_branch;
1432   bool has_label;
1433 };
1434 
1435 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1436 
1437 static bool
1438 remove_useless_stmts_warn_notreached (tree stmt)
1439 {
1440   if (EXPR_HAS_LOCATION (stmt))
1441     {
1442       location_t loc = EXPR_LOCATION (stmt);
1443       if (LOCATION_LINE (loc) > 0)
1444 	{
1445 	  warning (0, "%Hwill never be executed", &loc);
1446 	  return true;
1447 	}
1448     }
1449 
1450   switch (TREE_CODE (stmt))
1451     {
1452     case STATEMENT_LIST:
1453       {
1454 	tree_stmt_iterator i;
1455 	for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1456 	  if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1457 	    return true;
1458       }
1459       break;
1460 
1461     case COND_EXPR:
1462       if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1463 	return true;
1464       if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1465 	return true;
1466       if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1467 	return true;
1468       break;
1469 
1470     case TRY_FINALLY_EXPR:
1471     case TRY_CATCH_EXPR:
1472       if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1473 	return true;
1474       if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1475 	return true;
1476       break;
1477 
1478     case CATCH_EXPR:
1479       return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1480     case EH_FILTER_EXPR:
1481       return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1482     case BIND_EXPR:
1483       return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1484 
1485     default:
1486       /* Not a live container.  */
1487       break;
1488     }
1489 
1490   return false;
1491 }
1492 
1493 static void
1494 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1495 {
1496   tree then_clause, else_clause, cond;
1497   bool save_has_label, then_has_label, else_has_label;
1498 
1499   save_has_label = data->has_label;
1500   data->has_label = false;
1501   data->last_goto = NULL;
1502 
1503   remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1504 
1505   then_has_label = data->has_label;
1506   data->has_label = false;
1507   data->last_goto = NULL;
1508 
1509   remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1510 
1511   else_has_label = data->has_label;
1512   data->has_label = save_has_label | then_has_label | else_has_label;
1513 
1514   then_clause = COND_EXPR_THEN (*stmt_p);
1515   else_clause = COND_EXPR_ELSE (*stmt_p);
1516   cond = fold (COND_EXPR_COND (*stmt_p));
1517 
1518   /* If neither arm does anything at all, we can remove the whole IF.  */
1519   if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1520     {
1521       *stmt_p = build_empty_stmt ();
1522       data->repeat = true;
1523     }
1524 
1525   /* If there are no reachable statements in an arm, then we can
1526      zap the entire conditional.  */
1527   else if (integer_nonzerop (cond) && !else_has_label)
1528     {
1529       if (warn_notreached)
1530 	remove_useless_stmts_warn_notreached (else_clause);
1531       *stmt_p = then_clause;
1532       data->repeat = true;
1533     }
1534   else if (integer_zerop (cond) && !then_has_label)
1535     {
1536       if (warn_notreached)
1537 	remove_useless_stmts_warn_notreached (then_clause);
1538       *stmt_p = else_clause;
1539       data->repeat = true;
1540     }
1541 
1542   /* Check a couple of simple things on then/else with single stmts.  */
1543   else
1544     {
1545       tree then_stmt = expr_only (then_clause);
1546       tree else_stmt = expr_only (else_clause);
1547 
1548       /* Notice branches to a common destination.  */
1549       if (then_stmt && else_stmt
1550 	  && TREE_CODE (then_stmt) == GOTO_EXPR
1551 	  && TREE_CODE (else_stmt) == GOTO_EXPR
1552 	  && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1553 	{
1554 	  *stmt_p = then_stmt;
1555 	  data->repeat = true;
1556 	}
1557 
1558       /* If the THEN/ELSE clause merely assigns a value to a variable or
1559 	 parameter which is already known to contain that value, then
1560 	 remove the useless THEN/ELSE clause.  */
1561       else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1562 	{
1563 	  if (else_stmt
1564 	      && TREE_CODE (else_stmt) == MODIFY_EXPR
1565 	      && TREE_OPERAND (else_stmt, 0) == cond
1566 	      && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1567 	    COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1568 	}
1569       else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1570 	       && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1571 		   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1572 	       && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1573 	{
1574 	  tree stmt = (TREE_CODE (cond) == EQ_EXPR
1575 		       ? then_stmt : else_stmt);
1576 	  tree *location = (TREE_CODE (cond) == EQ_EXPR
1577 			    ? &COND_EXPR_THEN (*stmt_p)
1578 			    : &COND_EXPR_ELSE (*stmt_p));
1579 
1580 	  if (stmt
1581 	      && TREE_CODE (stmt) == MODIFY_EXPR
1582 	      && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1583 	      && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1584 	    *location = alloc_stmt_list ();
1585 	}
1586     }
1587 
1588   /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
1589      would be re-introduced during lowering.  */
1590   data->last_goto = NULL;
1591 }
1592 
1593 
1594 static void
1595 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1596 {
1597   bool save_may_branch, save_may_throw;
1598   bool this_may_branch, this_may_throw;
1599 
1600   /* Collect may_branch and may_throw information for the body only.  */
1601   save_may_branch = data->may_branch;
1602   save_may_throw = data->may_throw;
1603   data->may_branch = false;
1604   data->may_throw = false;
1605   data->last_goto = NULL;
1606 
1607   remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1608 
1609   this_may_branch = data->may_branch;
1610   this_may_throw = data->may_throw;
1611   data->may_branch |= save_may_branch;
1612   data->may_throw |= save_may_throw;
1613   data->last_goto = NULL;
1614 
1615   remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1616 
1617   /* If the body is empty, then we can emit the FINALLY block without
1618      the enclosing TRY_FINALLY_EXPR.  */
1619   if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1620     {
1621       *stmt_p = TREE_OPERAND (*stmt_p, 1);
1622       data->repeat = true;
1623     }
1624 
1625   /* If the handler is empty, then we can emit the TRY block without
1626      the enclosing TRY_FINALLY_EXPR.  */
1627   else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1628     {
1629       *stmt_p = TREE_OPERAND (*stmt_p, 0);
1630       data->repeat = true;
1631     }
1632 
1633   /* If the body neither throws, nor branches, then we can safely
1634      string the TRY and FINALLY blocks together.  */
1635   else if (!this_may_branch && !this_may_throw)
1636     {
1637       tree stmt = *stmt_p;
1638       *stmt_p = TREE_OPERAND (stmt, 0);
1639       append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1640       data->repeat = true;
1641     }
1642 }
1643 
1644 
1645 static void
1646 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1647 {
1648   bool save_may_throw, this_may_throw;
1649   tree_stmt_iterator i;
1650   tree stmt;
1651 
1652   /* Collect may_throw information for the body only.  */
1653   save_may_throw = data->may_throw;
1654   data->may_throw = false;
1655   data->last_goto = NULL;
1656 
1657   remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1658 
1659   this_may_throw = data->may_throw;
1660   data->may_throw = save_may_throw;
1661 
1662   /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
1663   if (!this_may_throw)
1664     {
1665       if (warn_notreached)
1666 	remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1667       *stmt_p = TREE_OPERAND (*stmt_p, 0);
1668       data->repeat = true;
1669       return;
1670     }
1671 
1672   /* Process the catch clause specially.  We may be able to tell that
1673      no exceptions propagate past this point.  */
1674 
1675   this_may_throw = true;
1676   i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1677   stmt = tsi_stmt (i);
1678   data->last_goto = NULL;
1679 
1680   switch (TREE_CODE (stmt))
1681     {
1682     case CATCH_EXPR:
1683       for (; !tsi_end_p (i); tsi_next (&i))
1684 	{
1685 	  stmt = tsi_stmt (i);
1686 	  /* If we catch all exceptions, then the body does not
1687 	     propagate exceptions past this point.  */
1688 	  if (CATCH_TYPES (stmt) == NULL)
1689 	    this_may_throw = false;
1690 	  data->last_goto = NULL;
1691 	  remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1692 	}
1693       break;
1694 
1695     case EH_FILTER_EXPR:
1696       if (EH_FILTER_MUST_NOT_THROW (stmt))
1697 	this_may_throw = false;
1698       else if (EH_FILTER_TYPES (stmt) == NULL)
1699 	this_may_throw = false;
1700       remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1701       break;
1702 
1703     default:
1704       /* Otherwise this is a cleanup.  */
1705       remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1706 
1707       /* If the cleanup is empty, then we can emit the TRY block without
1708 	 the enclosing TRY_CATCH_EXPR.  */
1709       if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1710 	{
1711 	  *stmt_p = TREE_OPERAND (*stmt_p, 0);
1712 	  data->repeat = true;
1713 	}
1714       break;
1715     }
1716   data->may_throw |= this_may_throw;
1717 }
1718 
1719 
1720 static void
1721 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1722 {
1723   tree block;
1724 
1725   /* First remove anything underneath the BIND_EXPR.  */
1726   remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1727 
1728   /* If the BIND_EXPR has no variables, then we can pull everything
1729      up one level and remove the BIND_EXPR, unless this is the toplevel
1730      BIND_EXPR for the current function or an inlined function.
1731 
1732      When this situation occurs we will want to apply this
1733      optimization again.  */
1734   block = BIND_EXPR_BLOCK (*stmt_p);
1735   if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1736       && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1737       && (! block
1738 	  || ! BLOCK_ABSTRACT_ORIGIN (block)
1739 	  || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1740 	      != FUNCTION_DECL)))
1741     {
1742       *stmt_p = BIND_EXPR_BODY (*stmt_p);
1743       data->repeat = true;
1744     }
1745 }
1746 
1747 
1748 static void
1749 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1750 {
1751   tree dest = GOTO_DESTINATION (*stmt_p);
1752 
1753   data->may_branch = true;
1754   data->last_goto = NULL;
1755 
1756   /* Record the last goto expr, so that we can delete it if unnecessary.  */
1757   if (TREE_CODE (dest) == LABEL_DECL)
1758     data->last_goto = stmt_p;
1759 }
1760 
1761 
1762 static void
1763 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1764 {
1765   tree label = LABEL_EXPR_LABEL (*stmt_p);
1766 
1767   data->has_label = true;
1768 
1769   /* We do want to jump across non-local label receiver code.  */
1770   if (DECL_NONLOCAL (label))
1771     data->last_goto = NULL;
1772 
1773   else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1774     {
1775       *data->last_goto = build_empty_stmt ();
1776       data->repeat = true;
1777     }
1778 
1779   /* ??? Add something here to delete unused labels.  */
1780 }
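/* Illustrative sketch: for a sequence such as

     goto L;
     L:;

   the GOTO_EXPR is remembered in data->last_goto, and when the
   immediately following LABEL_EXPR for L is visited above, the goto is
   overwritten with an empty statement and another iteration is requested
   through data->repeat.  */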
1781 
1782 
1783 /* If the called function is "const" or "pure", clear TREE_SIDE_EFFECTS
1784    on the call.  This allows us to eliminate redundant or useless
1785    calls to "const" functions.
1786 
1787    The gimplifier already does the same operation, but functions may only
1788    be recognized as const or pure after their calls have been gimplified,
1789    so we need to update the flags here.  */
1790 
1791 static void
1792 update_call_expr_flags (tree call)
1793 {
1794   tree decl = get_callee_fndecl (call);
1795   if (!decl)
1796     return;
1797   if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1798     TREE_SIDE_EFFECTS (call) = 0;
1799   if (TREE_NOTHROW (decl))
1800     TREE_NOTHROW (call) = 1;
1801 }
1802 
1803 
1804 /* T is CALL_EXPR.  Set current_function_calls_* flags.  */
1805 
1806 void
1807 notice_special_calls (tree t)
1808 {
1809   int flags = call_expr_flags (t);
1810 
1811   if (flags & ECF_MAY_BE_ALLOCA)
1812     current_function_calls_alloca = true;
1813   if (flags & ECF_RETURNS_TWICE)
1814     current_function_calls_setjmp = true;
1815 }
1816 
1817 
1818 /* Clear flags set by notice_special_calls.  Used by dead code removal
1819    to update the flags.  */
1820 
1821 void
1822 clear_special_calls (void)
1823 {
1824   current_function_calls_alloca = false;
1825   current_function_calls_setjmp = false;
1826 }
1827 
1828 
1829 static void
1830 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1831 {
1832   tree t = *tp, op;
1833 
1834   switch (TREE_CODE (t))
1835     {
1836     case COND_EXPR:
1837       remove_useless_stmts_cond (tp, data);
1838       break;
1839 
1840     case TRY_FINALLY_EXPR:
1841       remove_useless_stmts_tf (tp, data);
1842       break;
1843 
1844     case TRY_CATCH_EXPR:
1845       remove_useless_stmts_tc (tp, data);
1846       break;
1847 
1848     case BIND_EXPR:
1849       remove_useless_stmts_bind (tp, data);
1850       break;
1851 
1852     case GOTO_EXPR:
1853       remove_useless_stmts_goto (tp, data);
1854       break;
1855 
1856     case LABEL_EXPR:
1857       remove_useless_stmts_label (tp, data);
1858       break;
1859 
1860     case RETURN_EXPR:
1861       fold_stmt (tp);
1862       data->last_goto = NULL;
1863       data->may_branch = true;
1864       break;
1865 
1866     case CALL_EXPR:
1867       fold_stmt (tp);
1868       data->last_goto = NULL;
1869       notice_special_calls (t);
1870       update_call_expr_flags (t);
1871       if (tree_could_throw_p (t))
1872 	data->may_throw = true;
1873       break;
1874 
1875     case MODIFY_EXPR:
1876       data->last_goto = NULL;
1877       fold_stmt (tp);
1878       op = get_call_expr_in (t);
1879       if (op)
1880 	{
1881 	  update_call_expr_flags (op);
1882 	  notice_special_calls (op);
1883 	}
1884       if (tree_could_throw_p (t))
1885 	data->may_throw = true;
1886       break;
1887 
1888     case STATEMENT_LIST:
1889       {
1890 	tree_stmt_iterator i = tsi_start (t);
1891 	while (!tsi_end_p (i))
1892 	  {
1893 	    t = tsi_stmt (i);
1894 	    if (IS_EMPTY_STMT (t))
1895 	      {
1896 		tsi_delink (&i);
1897 		continue;
1898 	      }
1899 
1900 	    remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1901 
1902 	    t = tsi_stmt (i);
1903 	    if (TREE_CODE (t) == STATEMENT_LIST)
1904 	      {
1905 		tsi_link_before (&i, t, TSI_SAME_STMT);
1906 		tsi_delink (&i);
1907 	      }
1908 	    else
1909 	      tsi_next (&i);
1910 	  }
1911       }
1912       break;
1913     case ASM_EXPR:
1914       fold_stmt (tp);
1915       data->last_goto = NULL;
1916       break;
1917 
1918     default:
1919       data->last_goto = NULL;
1920       break;
1921     }
1922 }
1923 
1924 static void
1925 remove_useless_stmts (void)
1926 {
1927   struct rus_data data;
1928 
1929   clear_special_calls ();
1930 
1931   do
1932     {
1933       memset (&data, 0, sizeof (data));
1934       remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1935     }
1936   while (data.repeat);
1937 }
1938 
1939 
1940 struct tree_opt_pass pass_remove_useless_stmts =
1941 {
1942   "useless",				/* name */
1943   NULL,					/* gate */
1944   remove_useless_stmts,			/* execute */
1945   NULL,					/* sub */
1946   NULL,					/* next */
1947   0,					/* static_pass_number */
1948   0,					/* tv_id */
1949   PROP_gimple_any,			/* properties_required */
1950   0,					/* properties_provided */
1951   0,					/* properties_destroyed */
1952   0,					/* todo_flags_start */
1953   TODO_dump_func,			/* todo_flags_finish */
1954   0					/* letter */
1955 };
1956 
1957 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
1958 
1959 static void
1960 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1961 {
1962   tree phi;
1963 
1964   /* Since this block is no longer reachable, we can just delete all
1965      of its PHI nodes.  */
1966   phi = phi_nodes (bb);
1967   while (phi)
1968     {
1969       tree next = PHI_CHAIN (phi);
1970       remove_phi_node (phi, NULL_TREE);
1971       phi = next;
1972     }
1973 
1974   /* Remove edges to BB's successors.  */
1975   while (EDGE_COUNT (bb->succs) > 0)
1976     remove_edge (EDGE_SUCC (bb, 0));
1977 }
1978 
1979 
1980 /* Remove statements of basic block BB.  */
1981 
1982 static void
1983 remove_bb (basic_block bb)
1984 {
1985   block_stmt_iterator i;
1986 #ifdef USE_MAPPED_LOCATION
1987   source_location loc = UNKNOWN_LOCATION;
1988 #else
1989   source_locus loc = 0;
1990 #endif
1991 
1992   if (dump_file)
1993     {
1994       fprintf (dump_file, "Removing basic block %d\n", bb->index);
1995       if (dump_flags & TDF_DETAILS)
1996 	{
1997 	  dump_bb (bb, dump_file, 0);
1998 	  fprintf (dump_file, "\n");
1999 	}
2000     }
2001 
2002   /* If we remove the header or the latch of a loop, mark the loop for
2003      removal by setting its header and latch to NULL.  */
2004   if (current_loops)
2005     {
2006       struct loop *loop = bb->loop_father;
2007 
2008       if (loop->latch == bb
2009 	  || loop->header == bb)
2010 	{
2011 	  loop->latch = NULL;
2012 	  loop->header = NULL;
2013 
2014 	  /* Also clean up the information associated with the loop.  Updating
2015 	     it would waste time.  More importantly, it may refer to SSA
2016 	     names that were defined in other removed basic blocks -- those
2017 	     SSA names are now removed and invalid.  */
2018 	  free_numbers_of_iterations_estimates_loop (loop);
2019 	}
2020     }
2021 
2022   /* Remove all the instructions in the block.  */
2023   for (i = bsi_start (bb); !bsi_end_p (i);)
2024     {
2025       tree stmt = bsi_stmt (i);
2026       if (TREE_CODE (stmt) == LABEL_EXPR
2027           && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2028 	      || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2029 	{
2030 	  basic_block new_bb;
2031 	  block_stmt_iterator new_bsi;
2032 
2033 	  /* A non-reachable non-local label may still be referenced.
2034 	     But it no longer needs to carry the extra semantics of
2035 	     non-locality.  */
2036 	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2037 	    {
2038 	      DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2039 	      FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2040 	    }
2041 
2042 	  new_bb = bb->prev_bb;
2043 	  new_bsi = bsi_start (new_bb);
2044 	  bsi_remove (&i);
2045 	  bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2046 	}
2047       else
2048         {
2049 	  /* Release SSA definitions if we are in SSA.  Note that we
2050 	     may be called when not in SSA.  For example,
2051 	     final_cleanup calls this function via
2052 	     cleanup_tree_cfg.  */
2053 	  if (in_ssa_p)
2054 	    release_defs (stmt);
2055 
2056 	  bsi_remove (&i);
2057 	}
2058 
2059       /* Don't warn for removed gotos.  Gotos are often removed due to
2060 	 jump threading, thus resulting in bogus warnings.  Not great,
2061 	 since this way we lose warnings for gotos in the original
2062 	 program that are indeed unreachable.  */
2063       if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2064 	{
2065 #ifdef USE_MAPPED_LOCATION
2066 	  if (EXPR_HAS_LOCATION (stmt))
2067 	    loc = EXPR_LOCATION (stmt);
2068 #else
2069 	  source_locus t;
2070 	  t = EXPR_LOCUS (stmt);
2071 	  if (t && LOCATION_LINE (*t) > 0)
2072 	    loc = t;
2073 #endif
2074 	}
2075     }
2076 
2077   /* If requested, give a warning that the first statement in the
2078      block is unreachable.  The loop above records the location of the
2079      first located statement it sees, so LOC refers to the first
2080      statement in the block.  */
2081 #ifdef USE_MAPPED_LOCATION
2082   if (loc > BUILTINS_LOCATION)
2083     warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2084 #else
2085   if (loc)
2086     warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2087 #endif
2088 
2089   remove_phi_nodes_and_edges_for_unreachable_block (bb);
2090 }
2091 
2092 
2093 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2094    predicate VAL, return the edge that will be taken out of the block.
2095    If VAL does not match a unique edge, NULL is returned.  */
2096 
2097 edge
2098 find_taken_edge (basic_block bb, tree val)
2099 {
2100   tree stmt;
2101 
2102   stmt = last_stmt (bb);
2103 
2104   gcc_assert (stmt);
2105   gcc_assert (is_ctrl_stmt (stmt));
2106   gcc_assert (val);
2107 
2108   if (! is_gimple_min_invariant (val))
2109     return NULL;
2110 
2111   if (TREE_CODE (stmt) == COND_EXPR)
2112     return find_taken_edge_cond_expr (bb, val);
2113 
2114   if (TREE_CODE (stmt) == SWITCH_EXPR)
2115     return find_taken_edge_switch_expr (bb, val);
2116 
2117   if (computed_goto_p (stmt))
2118     return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2119 
2120   gcc_unreachable ();
2121 }
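/* Illustrative use (sketch, not a verbatim caller): when a pass proves
   that the predicate controlling BB is a constant, e.g.

     edge e = find_taken_edge (bb, integer_zero_node);

   a non-NULL E identifies the only successor that can be reached, and
   the remaining outgoing edges of BB may be removed as unreachable.  */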
2122 
2123 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2124    statement, determine which of the outgoing edges will be taken out of the
2125    block.  Return NULL if any edge may be taken.  */
2126 
2127 static edge
2128 find_taken_edge_computed_goto (basic_block bb, tree val)
2129 {
2130   basic_block dest;
2131   edge e = NULL;
2132 
2133   dest = label_to_block (val);
2134   if (dest)
2135     {
2136       e = find_edge (bb, dest);
2137       gcc_assert (e != NULL);
2138     }
2139 
2140   return e;
2141 }
2142 
2143 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2144    statement, determine which of the two edges will be taken out of the
2145    block.  Return NULL if either edge may be taken.  */
2146 
2147 static edge
2148 find_taken_edge_cond_expr (basic_block bb, tree val)
2149 {
2150   edge true_edge, false_edge;
2151 
2152   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2153 
2154   gcc_assert (TREE_CODE (val) == INTEGER_CST);
2155   return (zero_p (val) ? false_edge : true_edge);
2156 }
2157 
2158 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2159    statement, determine which edge will be taken out of the block.  Return
2160    NULL if any edge may be taken.  */
2161 
2162 static edge
2163 find_taken_edge_switch_expr (basic_block bb, tree val)
2164 {
2165   tree switch_expr, taken_case;
2166   basic_block dest_bb;
2167   edge e;
2168 
2169   switch_expr = last_stmt (bb);
2170   taken_case = find_case_label_for_value (switch_expr, val);
2171   dest_bb = label_to_block (CASE_LABEL (taken_case));
2172 
2173   e = find_edge (bb, dest_bb);
2174   gcc_assert (e);
2175   return e;
2176 }
2177 
2178 
2179 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2180    We can make optimal use here of the fact that the case labels are
2181    sorted: We can do a binary search for a case matching VAL.  */
2182 
2183 static tree
2184 find_case_label_for_value (tree switch_expr, tree val)
2185 {
2186   tree vec = SWITCH_LABELS (switch_expr);
2187   size_t low, high, n = TREE_VEC_LENGTH (vec);
2188   tree default_case = TREE_VEC_ELT (vec, n - 1);
2189 
2190   for (low = -1, high = n - 1; high - low > 1; )
2191     {
2192       size_t i = (high + low) / 2;
2193       tree t = TREE_VEC_ELT (vec, i);
2194       int cmp;
2195 
2196       /* Cache the result of comparing CASE_LOW and val.  */
2197       cmp = tree_int_cst_compare (CASE_LOW (t), val);
2198 
2199       if (cmp > 0)
2200 	high = i;
2201       else
2202 	low = i;
2203 
2204       if (CASE_HIGH (t) == NULL)
2205 	{
2206 	  /* A single-valued case label.  */
2207 	  if (cmp == 0)
2208 	    return t;
2209 	}
2210       else
2211 	{
2212 	  /* A case range.  We can only handle integer ranges.  */
2213 	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2214 	    return t;
2215 	}
2216     }
2217 
2218   return default_case;
2219 }
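/* Illustrative sketch: with the sorted case vector

     case 1:  ...    case 5 ... 9:  ...    default:  ...

   a query for VAL == 7 narrows the [low, high] interval onto the second
   entry, sees CASE_LOW (5) <= 7 <= CASE_HIGH (9) and returns that
   CASE_LABEL_EXPR, while a query for VAL == 3 matches no entry and falls
   back to the default case stored in the last element of the vector.  */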
2220 
2221 
2222 
2223 
2224 /*---------------------------------------------------------------------------
2225 			      Debugging functions
2226 ---------------------------------------------------------------------------*/
2227 
2228 /* Dump tree-specific information of block BB to file OUTF.  */
2229 
2230 void
2231 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2232 {
2233   dump_generic_bb (outf, bb, indent, TDF_VOPS);
2234 }
2235 
2236 
2237 /* Dump a basic block on stderr.  */
2238 
2239 void
2240 debug_tree_bb (basic_block bb)
2241 {
2242   dump_bb (bb, stderr, 0);
2243 }
2244 
2245 
2246 /* Dump basic block with index N on stderr.  */
2247 
2248 basic_block
2249 debug_tree_bb_n (int n)
2250 {
2251   debug_tree_bb (BASIC_BLOCK (n));
2252   return BASIC_BLOCK (n);
2253 }
2254 
2255 
2256 /* Dump the CFG on stderr.
2257 
2258    FLAGS are the same used by the tree dumping functions
2259    (see TDF_* in tree.h).  */
2260 
2261 void
2262 debug_tree_cfg (int flags)
2263 {
2264   dump_tree_cfg (stderr, flags);
2265 }
2266 
2267 
2268 /* Dump the program showing basic block boundaries on the given FILE.
2269 
2270    FLAGS are the same used by the tree dumping functions (see TDF_* in
2271    tree.h).  */
2272 
2273 void
2274 dump_tree_cfg (FILE *file, int flags)
2275 {
2276   if (flags & TDF_DETAILS)
2277     {
2278       const char *funcname
2279 	= lang_hooks.decl_printable_name (current_function_decl, 2);
2280 
2281       fputc ('\n', file);
2282       fprintf (file, ";; Function %s\n\n", funcname);
2283       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2284 	       n_basic_blocks, n_edges, last_basic_block);
2285 
2286       brief_dump_cfg (file);
2287       fprintf (file, "\n");
2288     }
2289 
2290   if (flags & TDF_STATS)
2291     dump_cfg_stats (file);
2292 
2293   dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2294 }
2295 
2296 
2297 /* Dump CFG statistics on FILE.  */
2298 
2299 void
2300 dump_cfg_stats (FILE *file)
2301 {
2302   static long max_num_merged_labels = 0;
2303   unsigned long size, total = 0;
2304   long num_edges;
2305   basic_block bb;
2306   const char * const fmt_str   = "%-30s%-13s%12s\n";
2307   const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2308   const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2309   const char * const fmt_str_3 = "%-43s%11lu%c\n";
2310   const char *funcname
2311     = lang_hooks.decl_printable_name (current_function_decl, 2);
2312 
2313 
2314   fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2315 
2316   fprintf (file, "---------------------------------------------------------\n");
2317   fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2318   fprintf (file, fmt_str, "", "  instances  ", "used ");
2319   fprintf (file, "---------------------------------------------------------\n");
2320 
2321   size = n_basic_blocks * sizeof (struct basic_block_def);
2322   total += size;
2323   fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2324 	   SCALE (size), LABEL (size));
2325 
2326   num_edges = 0;
2327   FOR_EACH_BB (bb)
2328     num_edges += EDGE_COUNT (bb->succs);
2329   size = num_edges * sizeof (struct edge_def);
2330   total += size;
2331   fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2332 
2333   fprintf (file, "---------------------------------------------------------\n");
2334   fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2335 	   LABEL (total));
2336   fprintf (file, "---------------------------------------------------------\n");
2337   fprintf (file, "\n");
2338 
2339   if (cfg_stats.num_merged_labels > max_num_merged_labels)
2340     max_num_merged_labels = cfg_stats.num_merged_labels;
2341 
2342   fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2343 	   cfg_stats.num_merged_labels, max_num_merged_labels);
2344 
2345   fprintf (file, "\n");
2346 }
2347 
2348 
2349 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2350    linked in the final executable.  */
2351 
2352 void
2353 debug_cfg_stats (void)
2354 {
2355   dump_cfg_stats (stderr);
2356 }
2357 
2358 
2359 /* Dump the flowgraph to a .vcg FILE.  */
2360 
2361 static void
2362 tree_cfg2vcg (FILE *file)
2363 {
2364   edge e;
2365   edge_iterator ei;
2366   basic_block bb;
2367   const char *funcname
2368     = lang_hooks.decl_printable_name (current_function_decl, 2);
2369 
2370   /* Write the file header.  */
2371   fprintf (file, "graph: { title: \"%s\"\n", funcname);
2372   fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2373   fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2374 
2375   /* Write blocks and edges.  */
2376   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2377     {
2378       fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2379 	       e->dest->index);
2380 
2381       if (e->flags & EDGE_FAKE)
2382 	fprintf (file, " linestyle: dotted priority: 10");
2383       else
2384 	fprintf (file, " linestyle: solid priority: 100");
2385 
2386       fprintf (file, " }\n");
2387     }
2388   fputc ('\n', file);
2389 
2390   FOR_EACH_BB (bb)
2391     {
2392       enum tree_code head_code, end_code;
2393       const char *head_name, *end_name;
2394       int head_line = 0;
2395       int end_line = 0;
2396       tree first = first_stmt (bb);
2397       tree last = last_stmt (bb);
2398 
2399       if (first)
2400 	{
2401 	  head_code = TREE_CODE (first);
2402 	  head_name = tree_code_name[head_code];
2403 	  head_line = get_lineno (first);
2404 	}
2405       else
2406 	head_name = "no-statement";
2407 
2408       if (last)
2409 	{
2410 	  end_code = TREE_CODE (last);
2411 	  end_name = tree_code_name[end_code];
2412 	  end_line = get_lineno (last);
2413 	}
2414       else
2415 	end_name = "no-statement";
2416 
2417       fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2418 	       bb->index, bb->index, head_name, head_line, end_name,
2419 	       end_line);
2420 
2421       FOR_EACH_EDGE (e, ei, bb->succs)
2422 	{
2423 	  if (e->dest == EXIT_BLOCK_PTR)
2424 	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2425 	  else
2426 	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2427 
2428 	  if (e->flags & EDGE_FAKE)
2429 	    fprintf (file, " priority: 10 linestyle: dotted");
2430 	  else
2431 	    fprintf (file, " priority: 100 linestyle: solid");
2432 
2433 	  fprintf (file, " }\n");
2434 	}
2435 
2436       if (bb->next_bb != EXIT_BLOCK_PTR)
2437 	fputc ('\n', file);
2438     }
2439 
2440   fputs ("}\n\n", file);
2441 }
2442 
2443 
2444 
2445 /*---------------------------------------------------------------------------
2446 			     Miscellaneous helpers
2447 ---------------------------------------------------------------------------*/
2448 
2449 /* Return true if T represents a stmt that always transfers control.  */
2450 
2451 bool
2452 is_ctrl_stmt (tree t)
2453 {
2454   return (TREE_CODE (t) == COND_EXPR
2455 	  || TREE_CODE (t) == SWITCH_EXPR
2456 	  || TREE_CODE (t) == GOTO_EXPR
2457 	  || TREE_CODE (t) == RETURN_EXPR
2458 	  || TREE_CODE (t) == RESX_EXPR);
2459 }
2460 
2461 
2462 /* Return true if T is a statement that may alter the flow of control
2463    (e.g., a call to a non-returning function).  */
2464 
2465 bool
2466 is_ctrl_altering_stmt (tree t)
2467 {
2468   tree call;
2469 
2470   gcc_assert (t);
2471   call = get_call_expr_in (t);
2472   if (call)
2473     {
2474       /* A non-pure/const CALL_EXPR alters flow control if the current
2475 	 function has nonlocal labels.  */
2476       if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2477 	return true;
2478 
2479       /* A CALL_EXPR also alters control flow if it does not return.  */
2480       if (call_expr_flags (call) & ECF_NORETURN)
2481 	return true;
2482     }
2483 
2484   /* If a statement can throw, it alters control flow.  */
2485   return tree_can_throw_internal (t);
2486 }
2487 
2488 
2489 /* Return true if T is a computed goto.  */
2490 
2491 bool
2492 computed_goto_p (tree t)
2493 {
2494   return (TREE_CODE (t) == GOTO_EXPR
2495 	  && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2496 }
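/* For example, the GNU C statement "goto *ptr;" is represented as a
   GOTO_EXPR whose GOTO_DESTINATION is the expression PTR rather than a
   LABEL_DECL, which is exactly what computed_goto_p detects.  */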
2497 
2498 
2499 /* Checks whether EXPR is a simple local goto.  */
2500 
2501 bool
2502 simple_goto_p (tree expr)
2503 {
2504   return (TREE_CODE (expr) == GOTO_EXPR
2505 	  && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2506 }
2507 
2508 
2509 /* Return true if T should start a new basic block.  PREV_T is the
2510    statement preceding T.  It is used when T is a label or a case label.
2511    Labels should only start a new basic block if their previous statement
2512    wasn't a label.  Otherwise, sequences of labels would generate
2513    unnecessary basic blocks that contain only a single label.  */
2514 
2515 static inline bool
2516 stmt_starts_bb_p (tree t, tree prev_t)
2517 {
2518   if (t == NULL_TREE)
2519     return false;
2520 
2521   /* LABEL_EXPRs start a new basic block only if the preceding
2522      statement wasn't a label of the same type.  This prevents the
2523      creation of consecutive blocks that have nothing but a single
2524      label.  */
2525   if (TREE_CODE (t) == LABEL_EXPR)
2526     {
2527       /* Nonlocal and computed GOTO targets always start a new block.  */
2528       if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2529 	  || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2530 	return true;
2531 
2532       if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2533 	{
2534 	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2535 	    return true;
2536 
2537 	  cfg_stats.num_merged_labels++;
2538 	  return false;
2539 	}
2540       else
2541 	return true;
2542     }
2543 
2544   return false;
2545 }
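/* Illustrative sketch: in

     L1:
     L2:
       x = 1;

   only L1 starts a new basic block; L2 is merged into the same block and
   cfg_stats.num_merged_labels is incremented, unless L1 is a nonlocal
   label, in which case L2 must begin its own block.  */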
2546 
2547 
2548 /* Return true if T should end a basic block.  */
2549 
2550 bool
2551 stmt_ends_bb_p (tree t)
2552 {
2553   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2554 }
2555 
2556 
2557 /* Add gotos that used to be represented implicitly in the CFG.  */
2558 
2559 void
2560 disband_implicit_edges (void)
2561 {
2562   basic_block bb;
2563   block_stmt_iterator last;
2564   edge e;
2565   edge_iterator ei;
2566   tree stmt, label;
2567 
2568   FOR_EACH_BB (bb)
2569     {
2570       last = bsi_last (bb);
2571       stmt = last_stmt (bb);
2572 
2573       if (stmt && TREE_CODE (stmt) == COND_EXPR)
2574 	{
2575 	  /* Remove superfluous gotos from COND_EXPR branches.  Moved
2576 	     from cfg_remove_useless_stmts here since it violates the
2577 	     invariants for tree--cfg correspondence and thus fits better
2578 	     here where we do it anyway.  */
2579 	  e = find_edge (bb, bb->next_bb);
2580 	  if (e)
2581 	    {
2582 	      if (e->flags & EDGE_TRUE_VALUE)
2583 		COND_EXPR_THEN (stmt) = build_empty_stmt ();
2584 	      else if (e->flags & EDGE_FALSE_VALUE)
2585 		COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2586 	      else
2587 		gcc_unreachable ();
2588 	      e->flags |= EDGE_FALLTHRU;
2589 	    }
2590 
2591 	  continue;
2592 	}
2593 
2594       if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2595 	{
2596 	  /* Remove the RETURN_EXPR if we may fall through to the exit
2597 	     instead.  */
2598 	  gcc_assert (single_succ_p (bb));
2599 	  gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2600 
2601 	  if (bb->next_bb == EXIT_BLOCK_PTR
2602 	      && !TREE_OPERAND (stmt, 0))
2603 	    {
2604 	      bsi_remove (&last);
2605 	      single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2606 	    }
2607 	  continue;
2608 	}
2609 
2610       /* There can be no fallthru edge if the last statement is a control
2611 	 one.  */
2612       if (stmt && is_ctrl_stmt (stmt))
2613 	continue;
2614 
2615       /* Find a fallthru edge and emit the goto if necessary.  */
2616       FOR_EACH_EDGE (e, ei, bb->succs)
2617 	if (e->flags & EDGE_FALLTHRU)
2618 	  break;
2619 
2620       if (!e || e->dest == bb->next_bb)
2621 	continue;
2622 
2623       gcc_assert (e->dest != EXIT_BLOCK_PTR);
2624       label = tree_block_label (e->dest);
2625 
2626       stmt = build1 (GOTO_EXPR, void_type_node, label);
2627 #ifdef USE_MAPPED_LOCATION
2628       SET_EXPR_LOCATION (stmt, e->goto_locus);
2629 #else
2630       SET_EXPR_LOCUS (stmt, e->goto_locus);
2631 #endif
2632       bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2633       e->flags &= ~EDGE_FALLTHRU;
2634     }
2635 }
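/* Illustrative sketch: if CFG transformations leave a block whose
   fallthru successor is no longer the physically following block, the
   loop above materializes an explicit "goto <label of destination>;" at
   the end of the block and clears EDGE_FALLTHRU, so that the statement
   stream again matches the control flow graph.  */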
2636 
2637 /* Remove block annotations and other data structures.  */
2638 
2639 void
2640 delete_tree_cfg_annotations (void)
2641 {
2642   label_to_block_map = NULL;
2643 }
2644 
2645 
2646 /* Return the first statement in basic block BB.  */
2647 
2648 tree
2649 first_stmt (basic_block bb)
2650 {
2651   block_stmt_iterator i = bsi_start (bb);
2652   return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2653 }
2654 
2655 
2656 /* Return the last statement in basic block BB.  */
2657 
2658 tree
2659 last_stmt (basic_block bb)
2660 {
2661   block_stmt_iterator b = bsi_last (bb);
2662   return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2663 }
2664 
2665 
2666 /* Return a pointer to the last statement in block BB.  */
2667 
2668 tree *
2669 last_stmt_ptr (basic_block bb)
2670 {
2671   block_stmt_iterator last = bsi_last (bb);
2672   return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2673 }
2674 
2675 
2676 /* Return the last statement of an otherwise empty block.  Return NULL
2677    if the block is totally empty, or if it contains more than one
2678    statement.  */
2679 
2680 tree
2681 last_and_only_stmt (basic_block bb)
2682 {
2683   block_stmt_iterator i = bsi_last (bb);
2684   tree last, prev;
2685 
2686   if (bsi_end_p (i))
2687     return NULL_TREE;
2688 
2689   last = bsi_stmt (i);
2690   bsi_prev (&i);
2691   if (bsi_end_p (i))
2692     return last;
2693 
2694   /* Empty statements should no longer appear in the instruction stream.
2695      Everything that might have appeared before should be deleted by
2696      remove_useless_stmts, and the optimizers should just bsi_remove
2697      instead of smashing with build_empty_stmt.
2698 
2699      Thus the only thing that should appear here in a block containing
2700      one executable statement is a label.  */
2701   prev = bsi_stmt (i);
2702   if (TREE_CODE (prev) == LABEL_EXPR)
2703     return last;
2704   else
2705     return NULL_TREE;
2706 }
2707 
2708 
2709 /* Mark BB as the basic block holding statement T.  */
2710 
2711 void
2712 set_bb_for_stmt (tree t, basic_block bb)
2713 {
2714   if (TREE_CODE (t) == PHI_NODE)
2715     PHI_BB (t) = bb;
2716   else if (TREE_CODE (t) == STATEMENT_LIST)
2717     {
2718       tree_stmt_iterator i;
2719       for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2720 	set_bb_for_stmt (tsi_stmt (i), bb);
2721     }
2722   else
2723     {
2724       stmt_ann_t ann = get_stmt_ann (t);
2725       ann->bb = bb;
2726 
2727       /* If the statement is a label, add the label to block-to-labels map
2728 	 so that we can speed up edge creation for GOTO_EXPRs.  */
2729       if (TREE_CODE (t) == LABEL_EXPR)
2730 	{
2731 	  int uid;
2732 
2733 	  t = LABEL_EXPR_LABEL (t);
2734 	  uid = LABEL_DECL_UID (t);
2735 	  if (uid == -1)
2736 	    {
2737 	      LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2738 	      if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2739 		VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2740 	    }
2741 	  else
2742 	    /* We're moving an existing label.  Make sure that we've
2743 		removed it from the old block.  */
2744 	    gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2745 	  VARRAY_BB (label_to_block_map, uid) = bb;
2746 	}
2747     }
2748 }
2749 
2750 /* Finds iterator for STMT.  */
2751 
2752 extern block_stmt_iterator
2753 bsi_for_stmt (tree stmt)
2754 {
2755   block_stmt_iterator bsi;
2756 
2757   for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2758     if (bsi_stmt (bsi) == stmt)
2759       return bsi;
2760 
2761   gcc_unreachable ();
2762 }
2763 
2764 /* If statement T (or any statement in statement list T) has been marked modified, update it.  */
2765 static inline void
2766 update_modified_stmts (tree t)
2767 {
2768   if (TREE_CODE (t) == STATEMENT_LIST)
2769     {
2770       tree_stmt_iterator i;
2771       tree stmt;
2772       for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2773         {
2774 	  stmt = tsi_stmt (i);
2775 	  update_stmt_if_modified (stmt);
2776 	}
2777     }
2778   else
2779     update_stmt_if_modified (t);
2780 }
2781 
2782 /* Insert statement (or statement list) T before the statement
2783    pointed-to by iterator I.  M specifies how to update iterator I
2784    after insertion (see enum bsi_iterator_update).  */
2785 
2786 void
2787 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2788 {
2789   set_bb_for_stmt (t, i->bb);
2790   update_modified_stmts (t);
2791   tsi_link_before (&i->tsi, t, m);
2792 }
2793 
2794 
2795 /* Insert statement (or statement list) T after the statement
2796    pointed-to by iterator I.  M specifies how to update iterator I
2797    after insertion (see enum bsi_iterator_update).  */
2798 
2799 void
2800 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2801 {
2802   set_bb_for_stmt (t, i->bb);
2803   update_modified_stmts (t);
2804   tsi_link_after (&i->tsi, t, m);
2805 }
2806 
2807 
2808 /* Remove the statement pointed to by iterator I.  The iterator is updated
2809    to the next statement.  */
2810 
2811 void
2812 bsi_remove (block_stmt_iterator *i)
2813 {
2814   tree t = bsi_stmt (*i);
2815   set_bb_for_stmt (t, NULL);
2816   delink_stmt_imm_use (t);
2817   tsi_delink (&i->tsi);
2818   mark_stmt_modified (t);
2819 }
2820 
2821 
2822 /* Move the statement at FROM so it comes right after the statement at TO.  */
2823 
2824 void
2825 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2826 {
2827   tree stmt = bsi_stmt (*from);
2828   bsi_remove (from);
2829   bsi_insert_after (to, stmt, BSI_SAME_STMT);
2830 }
2831 
2832 
2833 /* Move the statement at FROM so it comes right before the statement at TO.  */
2834 
2835 void
2836 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2837 {
2838   tree stmt = bsi_stmt (*from);
2839   bsi_remove (from);
2840   bsi_insert_before (to, stmt, BSI_SAME_STMT);
2841 }
2842 
2843 
2844 /* Move the statement at FROM to the end of basic block BB.  */
2845 
2846 void
2847 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2848 {
2849   block_stmt_iterator last = bsi_last (bb);
2850 
2851   /* Have to check bsi_end_p because it could be an empty block.  */
2852   if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2853     bsi_move_before (from, &last);
2854   else
2855     bsi_move_after (from, &last);
2856 }
2857 
2858 
2859 /* Replace the contents of the statement pointed to by iterator BSI
2860    with STMT.  If PRESERVE_EH_INFO is true, the exception handling
2861    information of the original statement is preserved.  */
2862 
2863 void
2864 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2865 {
2866   int eh_region;
2867   tree orig_stmt = bsi_stmt (*bsi);
2868 
2869   SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2870   set_bb_for_stmt (stmt, bsi->bb);
2871 
2872   /* Preserve EH region information from the original statement, if
2873      requested by the caller.  */
2874   if (preserve_eh_info)
2875     {
2876       eh_region = lookup_stmt_eh_region (orig_stmt);
2877       if (eh_region >= 0)
2878 	add_stmt_to_eh_region (stmt, eh_region);
2879     }
2880 
2881   delink_stmt_imm_use (orig_stmt);
2882   *bsi_stmt_ptr (*bsi) = stmt;
2883   mark_stmt_modified (stmt);
2884   update_modified_stmts (stmt);
2885 }
2886 
2887 
2888 /* Insert the statement pointed-to by BSI into edge E.  Every attempt
2889    is made to place the statement in an existing basic block, but
2890    sometimes that isn't possible.  When it isn't possible, the edge is
2891    split and the statement is added to the new block.
2892 
2893    In all cases, the returned *BSI points to the correct location.  The
2894    return value is true if insertion should be done after the location,
2895    or false if it should be done before the location.  If a new basic block
2896    has to be created, it is stored in *NEW_BB.  */
2897 
2898 static bool
2899 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2900 			   basic_block *new_bb)
2901 {
2902   basic_block dest, src;
2903   tree tmp;
2904 
2905   dest = e->dest;
2906  restart:
2907 
2908   /* If the destination has one predecessor which has no PHI nodes,
2909      insert there.  Except for the exit block.
2910 
2911      The requirement for no PHI nodes could be relaxed.  Basically we
2912      would have to examine the PHIs to prove that none of them used
2913      the value set by the statement we want to insert on E.  That
2914      hardly seems worth the effort.  */
2915   if (single_pred_p (dest)
2916       && ! phi_nodes (dest)
2917       && dest != EXIT_BLOCK_PTR)
2918     {
2919       *bsi = bsi_start (dest);
2920       if (bsi_end_p (*bsi))
2921 	return true;
2922 
2923       /* Make sure we insert after any leading labels.  */
2924       tmp = bsi_stmt (*bsi);
2925       while (TREE_CODE (tmp) == LABEL_EXPR)
2926 	{
2927 	  bsi_next (bsi);
2928 	  if (bsi_end_p (*bsi))
2929 	    break;
2930 	  tmp = bsi_stmt (*bsi);
2931 	}
2932 
2933       if (bsi_end_p (*bsi))
2934 	{
2935 	  *bsi = bsi_last (dest);
2936 	  return true;
2937 	}
2938       else
2939 	return false;
2940     }
2941 
2942   /* If the source has one successor, the edge is not abnormal and
2943      the last statement does not end a basic block, insert there.
2944      Except for the entry block.  */
2945   src = e->src;
2946   if ((e->flags & EDGE_ABNORMAL) == 0
2947       && single_succ_p (src)
2948       && src != ENTRY_BLOCK_PTR)
2949     {
2950       *bsi = bsi_last (src);
2951       if (bsi_end_p (*bsi))
2952 	return true;
2953 
2954       tmp = bsi_stmt (*bsi);
2955       if (!stmt_ends_bb_p (tmp))
2956 	return true;
2957 
2958       /* Insert code just before returning the value.  We may need to decompose
2959          the return in case it contains a non-trivial operand.  */
2960       if (TREE_CODE (tmp) == RETURN_EXPR)
2961         {
2962 	  tree op = TREE_OPERAND (tmp, 0);
2963 	  if (op && !is_gimple_val (op))
2964 	    {
2965 	      gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
2966 	      bsi_insert_before (bsi, op, BSI_NEW_STMT);
2967 	      TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
2968 	    }
2969 	  bsi_prev (bsi);
2970 	  return true;
2971         }
2972     }
2973 
2974   /* Otherwise, create a new basic block, and split this edge.  */
2975   dest = split_edge (e);
2976   if (new_bb)
2977     *new_bb = dest;
2978   e = single_pred_edge (dest);
2979   goto restart;
2980 }
2981 
2982 
2983 /* This routine will commit all pending edge insertions, creating any new
2984    basic blocks which are necessary.  */
2985 
2986 void
2987 bsi_commit_edge_inserts (void)
2988 {
2989   basic_block bb;
2990   edge e;
2991   edge_iterator ei;
2992 
2993   bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
2994 
2995   FOR_EACH_BB (bb)
2996     FOR_EACH_EDGE (e, ei, bb->succs)
2997       bsi_commit_one_edge_insert (e, NULL);
2998 }
2999 
3000 
3001 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3002    to this block, otherwise set it to NULL.  */
3003 
3004 void
3005 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3006 {
3007   if (new_bb)
3008     *new_bb = NULL;
3009   if (PENDING_STMT (e))
3010     {
3011       block_stmt_iterator bsi;
3012       tree stmt = PENDING_STMT (e);
3013 
3014       PENDING_STMT (e) = NULL_TREE;
3015 
3016       if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3017 	bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3018       else
3019 	bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3020     }
3021 }
3022 
3023 
3024 /* Add STMT to the pending list of edge E.  No actual insertion is
3025    made until a call to bsi_commit_edge_inserts () is made.  */
3026 
3027 void
3028 bsi_insert_on_edge (edge e, tree stmt)
3029 {
3030   append_to_statement_list (stmt, &PENDING_STMT (e));
3031 }
3032 
3033 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If a new
3034    block has to be created, it is returned.  */
3035 
3036 basic_block
3037 bsi_insert_on_edge_immediate (edge e, tree stmt)
3038 {
3039   block_stmt_iterator bsi;
3040   basic_block new_bb = NULL;
3041 
3042   gcc_assert (!PENDING_STMT (e));
3043 
3044   if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3045     bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3046   else
3047     bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3048 
3049   return new_bb;
3050 }
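/* Typical usage (sketch): a pass that wants STMT evaluated whenever edge
   E is taken either queues it with

     bsi_insert_on_edge (e, stmt);

   and later flushes all queued statements with bsi_commit_edge_inserts (),
   or, provided nothing is already queued on E, inserts it right away with
   bsi_insert_on_edge_immediate (e, stmt), which returns the new basic
   block if one had to be created.  */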
3051 
3052 /*---------------------------------------------------------------------------
3053 	     Tree specific functions for CFG manipulation
3054 ---------------------------------------------------------------------------*/
3055 
3056 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
3057 
3058 static void
3059 reinstall_phi_args (edge new_edge, edge old_edge)
3060 {
3061   tree var, phi;
3062 
3063   if (!PENDING_STMT (old_edge))
3064     return;
3065 
3066   for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3067        var && phi;
3068        var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3069     {
3070       tree result = TREE_PURPOSE (var);
3071       tree arg = TREE_VALUE (var);
3072 
3073       gcc_assert (result == PHI_RESULT (phi));
3074 
3075       add_phi_arg (phi, arg, new_edge);
3076     }
3077 
3078   PENDING_STMT (old_edge) = NULL;
3079 }
3080 
3081 /* Returns the basic block after which the new basic block created
3082    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
3083    near its "logical" location.  This is of most help to humans looking
3084    at debugging dumps.  */
3085 
3086 static basic_block
3087 split_edge_bb_loc (edge edge_in)
3088 {
3089   basic_block dest = edge_in->dest;
3090 
3091   if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3092     return edge_in->src;
3093   else
3094     return dest->prev_bb;
3095 }
3096 
3097 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
3098    Abort on abnormal edges.  */
3099 
3100 static basic_block
3101 tree_split_edge (edge edge_in)
3102 {
3103   basic_block new_bb, after_bb, dest, src;
3104   edge new_edge, e;
3105 
3106   /* Abnormal edges cannot be split.  */
3107   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3108 
3109   src = edge_in->src;
3110   dest = edge_in->dest;
3111 
3112   after_bb = split_edge_bb_loc (edge_in);
3113 
3114   new_bb = create_empty_bb (after_bb);
3115   new_bb->frequency = EDGE_FREQUENCY (edge_in);
3116   new_bb->count = edge_in->count;
3117   new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3118   new_edge->probability = REG_BR_PROB_BASE;
3119   new_edge->count = edge_in->count;
3120 
3121   e = redirect_edge_and_branch (edge_in, new_bb);
3122   gcc_assert (e);
3123   reinstall_phi_args (new_edge, e);
3124 
3125   return new_bb;
3126 }
3127 
3128 
3129 /* Return true when BB has label LABEL in it.  */
3130 
3131 static bool
3132 has_label_p (basic_block bb, tree label)
3133 {
3134   block_stmt_iterator bsi;
3135 
3136   for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3137     {
3138       tree stmt = bsi_stmt (bsi);
3139 
3140       if (TREE_CODE (stmt) != LABEL_EXPR)
3141 	return false;
3142       if (LABEL_EXPR_LABEL (stmt) == label)
3143 	return true;
3144     }
3145   return false;
3146 }
3147 
3148 
3149 /* Callback for walk_tree, check that all elements with address taken are
3150    properly noticed as such.  DATA is nonnull when TP is being walked
3151    inside a PHI node.  */
3152 
3153 static tree
3154 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3155 {
3156   tree t = *tp, x;
3157   bool in_phi = (data != NULL);
3158 
3159   if (TYPE_P (t))
3160     *walk_subtrees = 0;
3161 
3162   /* Check operand N for being valid GIMPLE and give error MSG if not.  */
3163 #define CHECK_OP(N, MSG) \
3164   do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
3165        { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3166 
3167   switch (TREE_CODE (t))
3168     {
3169     case SSA_NAME:
3170       if (SSA_NAME_IN_FREE_LIST (t))
3171 	{
3172 	  error ("SSA name in freelist but still referenced");
3173 	  return *tp;
3174 	}
3175       break;
3176 
3177     case ASSERT_EXPR:
3178       x = fold (ASSERT_EXPR_COND (t));
3179       if (x == boolean_false_node)
3180 	{
3181 	  error ("ASSERT_EXPR with an always-false condition");
3182 	  return *tp;
3183 	}
3184       break;
3185 
3186     case MODIFY_EXPR:
3187       x = TREE_OPERAND (t, 0);
3188       if (TREE_CODE (x) == BIT_FIELD_REF
3189 	  && is_gimple_reg (TREE_OPERAND (x, 0)))
3190 	{
3191 	  error ("GIMPLE register modified with BIT_FIELD_REF");
3192 	  return t;
3193 	}
3194       break;
3195 
3196     case ADDR_EXPR:
3197       {
3198 	bool old_invariant;
3199 	bool old_constant;
3200 	bool old_side_effects;
3201 	bool new_invariant;
3202 	bool new_constant;
3203 	bool new_side_effects;
3204 
3205         /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3206 	   dead PHIs that take the address of something.  But if the PHI
3207 	   result is dead, the fact that it takes the address of anything
3208 	   is irrelevant.  Because we can not tell from here if a PHI result
3209 	   is dead, we just skip this check for PHIs altogether.  This means
3210 	   we may be missing "valid" checks, but what can you do?
3211 	   This was PR19217.  */
3212         if (in_phi)
3213 	  break;
3214 
3215 	old_invariant = TREE_INVARIANT (t);
3216 	old_constant = TREE_CONSTANT (t);
3217 	old_side_effects = TREE_SIDE_EFFECTS (t);
3218 
3219 	recompute_tree_invarant_for_addr_expr (t);
3220 	new_invariant = TREE_INVARIANT (t);
3221 	new_side_effects = TREE_SIDE_EFFECTS (t);
3222 	new_constant = TREE_CONSTANT (t);
3223 
3224 	if (old_invariant != new_invariant)
3225 	  {
3226 	    error ("invariant not recomputed when ADDR_EXPR changed");
3227 	    return t;
3228 	  }
3229 
3230         if (old_constant != new_constant)
3231 	  {
3232 	    error ("constant not recomputed when ADDR_EXPR changed");
3233 	    return t;
3234 	  }
3235 	if (old_side_effects != new_side_effects)
3236 	  {
3237 	    error ("side effects not recomputed when ADDR_EXPR changed");
3238 	    return t;
3239 	  }
3240 
3241 	/* Skip any references (they will be checked when we recurse down the
3242 	   tree) and ensure that any variable used as a prefix is marked
3243 	   addressable.  */
3244 	for (x = TREE_OPERAND (t, 0);
3245 	     handled_component_p (x);
3246 	     x = TREE_OPERAND (x, 0))
3247 	  ;
3248 
3249 	if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3250 	  return NULL;
3251 	if (!TREE_ADDRESSABLE (x))
3252 	  {
3253 	    error ("address taken, but ADDRESSABLE bit not set");
3254 	    return x;
3255 	  }
3256 	break;
3257       }
3258 
3259     case COND_EXPR:
3260       x = COND_EXPR_COND (t);
3261       if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3262 	{
3263 	  error ("non-boolean used in condition");
3264 	  return x;
3265 	}
3266       if (!is_gimple_condexpr (x))
3267         {
3268 	  error ("invalid conditional operand");
3269 	  return x;
3270 	}
3271       break;
3272 
3273     case NOP_EXPR:
3274     case CONVERT_EXPR:
3275     case FIX_TRUNC_EXPR:
3276     case FIX_CEIL_EXPR:
3277     case FIX_FLOOR_EXPR:
3278     case FIX_ROUND_EXPR:
3279     case FLOAT_EXPR:
3280     case NEGATE_EXPR:
3281     case ABS_EXPR:
3282     case BIT_NOT_EXPR:
3283     case NON_LVALUE_EXPR:
3284     case TRUTH_NOT_EXPR:
3285       CHECK_OP (0, "invalid operand to unary operator");
3286       break;
3287 
3288     case REALPART_EXPR:
3289     case IMAGPART_EXPR:
3290     case COMPONENT_REF:
3291     case ARRAY_REF:
3292     case ARRAY_RANGE_REF:
3293     case BIT_FIELD_REF:
3294     case VIEW_CONVERT_EXPR:
3295       /* We have a nest of references.  Verify that each of the operands
3296 	 that determine where to reference is either a constant or a variable,
3297 	 verify that the base is valid, and then show we've already checked
3298 	 the subtrees.  */
3299       while (handled_component_p (t))
3300 	{
3301 	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3302 	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3303 	  else if (TREE_CODE (t) == ARRAY_REF
3304 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
3305 	    {
3306 	      CHECK_OP (1, "invalid array index");
3307 	      if (TREE_OPERAND (t, 2))
3308 		CHECK_OP (2, "invalid array lower bound");
3309 	      if (TREE_OPERAND (t, 3))
3310 		CHECK_OP (3, "invalid array stride");
3311 	    }
3312 	  else if (TREE_CODE (t) == BIT_FIELD_REF)
3313 	    {
3314 	      CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3315 	      CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3316 	    }
3317 
3318 	  t = TREE_OPERAND (t, 0);
3319 	}
3320 
3321       if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3322 	{
3323 	  error ("invalid reference prefix");
3324 	  return t;
3325 	}
3326       *walk_subtrees = 0;
3327       break;
3328 
3329     case LT_EXPR:
3330     case LE_EXPR:
3331     case GT_EXPR:
3332     case GE_EXPR:
3333     case EQ_EXPR:
3334     case NE_EXPR:
3335     case UNORDERED_EXPR:
3336     case ORDERED_EXPR:
3337     case UNLT_EXPR:
3338     case UNLE_EXPR:
3339     case UNGT_EXPR:
3340     case UNGE_EXPR:
3341     case UNEQ_EXPR:
3342     case LTGT_EXPR:
3343     case PLUS_EXPR:
3344     case MINUS_EXPR:
3345     case MULT_EXPR:
3346     case TRUNC_DIV_EXPR:
3347     case CEIL_DIV_EXPR:
3348     case FLOOR_DIV_EXPR:
3349     case ROUND_DIV_EXPR:
3350     case TRUNC_MOD_EXPR:
3351     case CEIL_MOD_EXPR:
3352     case FLOOR_MOD_EXPR:
3353     case ROUND_MOD_EXPR:
3354     case RDIV_EXPR:
3355     case EXACT_DIV_EXPR:
3356     case MIN_EXPR:
3357     case MAX_EXPR:
3358     case LSHIFT_EXPR:
3359     case RSHIFT_EXPR:
3360     case LROTATE_EXPR:
3361     case RROTATE_EXPR:
3362     case BIT_IOR_EXPR:
3363     case BIT_XOR_EXPR:
3364     case BIT_AND_EXPR:
3365       CHECK_OP (0, "invalid operand to binary operator");
3366       CHECK_OP (1, "invalid operand to binary operator");
3367       break;
3368 
3369     default:
3370       break;
3371     }
3372   return NULL;
3373 
3374 #undef CHECK_OP
3375 }
3376 
3377 
3378 /* Verify STMT, return true if STMT is not in GIMPLE form.
3379    TODO: Implement type checking.  */
3380 
3381 static bool
3382 verify_stmt (tree stmt, bool last_in_block)
3383 {
3384   tree addr;
3385 
3386   if (!is_gimple_stmt (stmt))
3387     {
3388       error ("is not a valid GIMPLE statement");
3389       goto fail;
3390     }
3391 
3392   addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3393   if (addr)
3394     {
3395       debug_generic_stmt (addr);
3396       return true;
3397     }
3398 
3399   /* If the statement is marked as part of an EH region, then it is
3400      expected that the statement could throw.  Verify that when
3401      optimizations simplify a statement to the point that it provably
3402      cannot throw, the EH data structures are updated to match.  */
3404   if (lookup_stmt_eh_region (stmt) >= 0)
3405     {
3406       if (!tree_could_throw_p (stmt))
3407 	{
3408 	  error ("statement marked for throw, but doesn%'t");
3409 	  goto fail;
3410 	}
3411       if (!last_in_block && tree_can_throw_internal (stmt))
3412 	{
3413 	  error ("statement marked for throw in middle of block");
3414 	  goto fail;
3415 	}
3416     }
3417 
3418   return false;
3419 
3420  fail:
3421   debug_generic_stmt (stmt);
3422   return true;
3423 }
3424 
3425 
3426 /* Return true when the T can be shared.  */
3427 
3428 static bool
3429 tree_node_can_be_shared (tree t)
3430 {
3431   if (IS_TYPE_OR_DECL_P (t)
3432       /* We check for constants explicitly since they are not considered
3433 	 gimple invariants if they overflowed.  */
3434       || CONSTANT_CLASS_P (t)
3435       || is_gimple_min_invariant (t)
3436       || TREE_CODE (t) == SSA_NAME
3437       || t == error_mark_node)
3438     return true;
3439 
3440   if (TREE_CODE (t) == CASE_LABEL_EXPR)
3441     return true;
3442 
3443   while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3444 	  /* We check for constants explicitly since they are not considered
3445 	     gimple invariants if they overflowed.  */
3446 	  && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3447 	      || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3448 	 || (TREE_CODE (t) == COMPONENT_REF
3449 	     || TREE_CODE (t) == REALPART_EXPR
3450 	     || TREE_CODE (t) == IMAGPART_EXPR))
3451     t = TREE_OPERAND (t, 0);
3452 
3453   if (DECL_P (t))
3454     return true;
3455 
3456   return false;
3457 }
3458 
3459 
3460 /* Called via walk_tree.  Verify tree sharing.  */
3461 
3462 static tree
3463 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3464 {
3465   htab_t htab = (htab_t) data;
3466   void **slot;
3467 
3468   if (tree_node_can_be_shared (*tp))
3469     {
3470       *walk_subtrees = false;
3471       return NULL;
3472     }
3473 
3474   slot = htab_find_slot (htab, *tp, INSERT);
3475   if (*slot)
3476     return *slot;
3477   *slot = *tp;
3478 
3479   return NULL;
3480 }
3481 
3482 
3483 /* Verify the GIMPLE statement chain.  */
3484 
3485 void
3486 verify_stmts (void)
3487 {
3488   basic_block bb;
3489   block_stmt_iterator bsi;
3490   bool err = false;
3491   htab_t htab;
3492   tree addr;
3493 
3494   timevar_push (TV_TREE_STMT_VERIFY);
3495   htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3496 
3497   FOR_EACH_BB (bb)
3498     {
3499       tree phi;
3500       int i;
3501 
3502       for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3503 	{
3504 	  int phi_num_args = PHI_NUM_ARGS (phi);
3505 
3506 	  if (bb_for_stmt (phi) != bb)
3507 	    {
3508 	      error ("bb_for_stmt (phi) is set to a wrong basic block");
3509 	      err |= true;
3510 	    }
3511 
3512 	  for (i = 0; i < phi_num_args; i++)
3513 	    {
3514 	      tree t = PHI_ARG_DEF (phi, i);
3515 	      tree addr;
3516 
3517 	      /* Addressable variables do have SSA_NAMEs but they
3518 		 are not considered gimple values.  */
3519 	      if (TREE_CODE (t) != SSA_NAME
3520 		  && TREE_CODE (t) != FUNCTION_DECL
3521 		  && !is_gimple_val (t))
3522 		{
3523 		  error ("PHI def is not a GIMPLE value");
3524 		  debug_generic_stmt (phi);
3525 		  debug_generic_stmt (t);
3526 		  err |= true;
3527 		}
3528 
3529 	      addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3530 	      if (addr)
3531 		{
3532 		  debug_generic_stmt (addr);
3533 		  err |= true;
3534 		}
3535 
3536 	      addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3537 	      if (addr)
3538 		{
3539 		  error ("incorrect sharing of tree nodes");
3540 		  debug_generic_stmt (phi);
3541 		  debug_generic_stmt (addr);
3542 		  err |= true;
3543 		}
3544 	    }
3545 	}
3546 
3547       for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3548 	{
3549 	  tree stmt = bsi_stmt (bsi);
3550 
3551 	  if (bb_for_stmt (stmt) != bb)
3552 	    {
3553 	      error ("bb_for_stmt (stmt) is set to a wrong basic block");
3554 	      err |= true;
3555 	    }
3556 
3557 	  bsi_next (&bsi);
3558 	  err |= verify_stmt (stmt, bsi_end_p (bsi));
3559 	  addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3560 	  if (addr)
3561 	    {
3562 	      error ("incorrect sharing of tree nodes");
3563 	      debug_generic_stmt (stmt);
3564 	      debug_generic_stmt (addr);
3565 	      err |= true;
3566 	    }
3567 	}
3568     }
3569 
3570   if (err)
3571     internal_error ("verify_stmts failed");
3572 
3573   htab_delete (htab);
3574   timevar_pop (TV_TREE_STMT_VERIFY);
3575 }
3576 
3577 
3578 /* Verifies that the flow information is OK.  */
3579 
3580 static int
3581 tree_verify_flow_info (void)
3582 {
3583   int err = 0;
3584   basic_block bb;
3585   block_stmt_iterator bsi;
3586   tree stmt;
3587   edge e;
3588   edge_iterator ei;
3589 
3590   if (ENTRY_BLOCK_PTR->stmt_list)
3591     {
3592       error ("ENTRY_BLOCK has a statement list associated with it");
3593       err = 1;
3594     }
3595 
3596   if (EXIT_BLOCK_PTR->stmt_list)
3597     {
3598       error ("EXIT_BLOCK has a statement list associated with it");
3599       err = 1;
3600     }
3601 
3602   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3603     if (e->flags & EDGE_FALLTHRU)
3604       {
3605 	error ("fallthru to exit from bb %d", e->src->index);
3606 	err = 1;
3607       }
3608 
3609   FOR_EACH_BB (bb)
3610     {
3611       bool found_ctrl_stmt = false;
3612 
3613       stmt = NULL_TREE;
3614 
3615       /* Skip labels on the start of basic block.  */
3616       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3617 	{
3618 	  tree prev_stmt = stmt;
3619 
3620 	  stmt = bsi_stmt (bsi);
3621 
3622 	  if (TREE_CODE (stmt) != LABEL_EXPR)
3623 	    break;
3624 
3625 	  if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3626 	    {
3627 	      error ("nonlocal label %s is not first "
3628 		     "in a sequence of labels in bb %d",
3629 		     IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3630 		     bb->index);
3631 	      err = 1;
3632 	    }
3633 
3634 	  if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3635 	    {
3636 	      error ("label %s to block does not match in bb %d",
3637 		     IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3638 		     bb->index);
3639 	      err = 1;
3640 	    }
3641 
3642 	  if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3643 	      != current_function_decl)
3644 	    {
3645 	      error ("label %s has incorrect context in bb %d",
3646 		     IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3647 		     bb->index);
3648 	      err = 1;
3649 	    }
3650 	}
3651 
3652       /* Verify that the body of basic block BB is free of control flow.  */
3653       for (; !bsi_end_p (bsi); bsi_next (&bsi))
3654 	{
3655 	  tree stmt = bsi_stmt (bsi);
3656 
3657 	  if (found_ctrl_stmt)
3658 	    {
3659 	      error ("control flow in the middle of basic block %d",
3660 		     bb->index);
3661 	      err = 1;
3662 	    }
3663 
3664 	  if (stmt_ends_bb_p (stmt))
3665 	    found_ctrl_stmt = true;
3666 
3667 	  if (TREE_CODE (stmt) == LABEL_EXPR)
3668 	    {
3669 	      error ("label %s in the middle of basic block %d",
3670 		     IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3671 		     bb->index);
3672 	      err = 1;
3673 	    }
3674 	}
3675       bsi = bsi_last (bb);
3676       if (bsi_end_p (bsi))
3677 	continue;
3678 
3679       stmt = bsi_stmt (bsi);
3680 
3681       err |= verify_eh_edges (stmt);
3682 
3683       if (is_ctrl_stmt (stmt))
3684 	{
3685 	  FOR_EACH_EDGE (e, ei, bb->succs)
3686 	    if (e->flags & EDGE_FALLTHRU)
3687 	      {
3688 		error ("fallthru edge after a control statement in bb %d",
3689 		       bb->index);
3690 		err = 1;
3691 	      }
3692 	}
3693 
3694       switch (TREE_CODE (stmt))
3695 	{
3696 	case COND_EXPR:
3697 	  {
3698 	    edge true_edge;
3699 	    edge false_edge;
3700 	    if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3701 		|| TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3702 	      {
3703 		error ("structured COND_EXPR at the end of bb %d", bb->index);
3704 		err = 1;
3705 	      }
3706 
3707 	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3708 
3709 	    if (!true_edge || !false_edge
3710 		|| !(true_edge->flags & EDGE_TRUE_VALUE)
3711 		|| !(false_edge->flags & EDGE_FALSE_VALUE)
3712 		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3713 		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3714 		|| EDGE_COUNT (bb->succs) >= 3)
3715 	      {
3716 		error ("wrong outgoing edge flags at end of bb %d",
3717 		       bb->index);
3718 		err = 1;
3719 	      }
3720 
3721 	    if (!has_label_p (true_edge->dest,
3722 			      GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3723 	      {
3724 		error ("%<then%> label does not match edge at end of bb %d",
3725 		       bb->index);
3726 		err = 1;
3727 	      }
3728 
3729 	    if (!has_label_p (false_edge->dest,
3730 			      GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3731 	      {
3732 		error ("%<else%> label does not match edge at end of bb %d",
3733 		       bb->index);
3734 		err = 1;
3735 	      }
3736 	  }
3737 	  break;
3738 
3739 	case GOTO_EXPR:
3740 	  if (simple_goto_p (stmt))
3741 	    {
3742 	      error ("explicit goto at end of bb %d", bb->index);
3743 	      err = 1;
3744 	    }
3745 	  else
3746 	    {
3747 	      /* FIXME.  We should double check that the labels in the
3748 		 destination blocks have their address taken.  */
3749 	      FOR_EACH_EDGE (e, ei, bb->succs)
3750 		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3751 				 | EDGE_FALSE_VALUE))
3752 		    || !(e->flags & EDGE_ABNORMAL))
3753 		  {
3754 		    error ("wrong outgoing edge flags at end of bb %d",
3755 			   bb->index);
3756 		    err = 1;
3757 		  }
3758 	    }
3759 	  break;
3760 
3761 	case RETURN_EXPR:
3762 	  if (!single_succ_p (bb)
3763 	      || (single_succ_edge (bb)->flags
3764 		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
3765 		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3766 	    {
3767 	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
3768 	      err = 1;
3769 	    }
3770 	  if (single_succ (bb) != EXIT_BLOCK_PTR)
3771 	    {
3772 	      error ("return edge does not point to exit in bb %d",
3773 		     bb->index);
3774 	      err = 1;
3775 	    }
3776 	  break;
3777 
3778 	case SWITCH_EXPR:
3779 	  {
3780 	    tree prev;
3781 	    edge e;
3782 	    size_t i, n;
3783 	    tree vec;
3784 
3785 	    vec = SWITCH_LABELS (stmt);
3786 	    n = TREE_VEC_LENGTH (vec);
3787 
3788 	    /* Mark all the destination basic blocks.  */
3789 	    for (i = 0; i < n; ++i)
3790 	      {
3791 		tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3792 		basic_block label_bb = label_to_block (lab);
3793 
3794 		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3795 		label_bb->aux = (void *)1;
3796 	      }
3797 
3798 	    /* Verify that the case labels are sorted.  */
3799 	    prev = TREE_VEC_ELT (vec, 0);
3800 	    for (i = 1; i < n - 1; ++i)
3801 	      {
3802 		tree c = TREE_VEC_ELT (vec, i);
3803 		if (! CASE_LOW (c))
3804 		  {
3805 		    error ("found default case not at end of case vector");
3806 		    err = 1;
3807 		    continue;
3808 		  }
3809 		if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3810 		  {
3811 		    error ("case labels not sorted:");
3812 		    print_generic_expr (stderr, prev, 0);
3813 		    fprintf (stderr, " is greater than ");
3814 		    print_generic_expr (stderr, c, 0);
3815 		    fprintf (stderr, " but comes before it.\n");
3816 		    err = 1;
3817 		  }
3818 		prev = c;
3819 	      }
3820 	    if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3821 	      {
3822 		error ("no default case found at end of case vector");
3823 		err = 1;
3824 	      }
3825 
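	    /* Each outgoing edge must lead to one of the case destinations
	       marked above; remember (with AUX == 2) the destinations that
	       do have an edge.  */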
3826 	    FOR_EACH_EDGE (e, ei, bb->succs)
3827 	      {
3828 		if (!e->dest->aux)
3829 		  {
3830 		    error ("extra outgoing edge %d->%d",
3831 			   bb->index, e->dest->index);
3832 		    err = 1;
3833 		  }
3834 		e->dest->aux = (void *)2;
3835 		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3836 				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3837 		  {
3838 		    error ("wrong outgoing edge flags at end of bb %d",
3839 			   bb->index);
3840 		    err = 1;
3841 		  }
3842 	      }
3843 
3844 	    /* Check that we have all of them.  */
3845 	    for (i = 0; i < n; ++i)
3846 	      {
3847 		tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3848 		basic_block label_bb = label_to_block (lab);
3849 
3850 		if (label_bb->aux != (void *)2)
3851 		  {
3852 		    error ("missing edge %i->%i",
3853 			   bb->index, label_bb->index);
3854 		    err = 1;
3855 		  }
3856 	      }
3857 
3858 	    FOR_EACH_EDGE (e, ei, bb->succs)
3859 	      e->dest->aux = (void *)0;
3860 	  }
3861 
3862 	default: ;
3863 	}
3864     }
3865 
3866   if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3867     verify_dominators (CDI_DOMINATORS);
3868 
3869   return err;
3870 }
3871 
3872 
3873 /* Updates phi nodes after creating a forwarder block joined
3874    by edge FALLTHRU.  */
3875 
3876 static void
3877 tree_make_forwarder_block (edge fallthru)
3878 {
3879   edge e;
3880   edge_iterator ei;
3881   basic_block dummy, bb;
3882   tree phi, new_phi, var;
3883 
3884   dummy = fallthru->src;
3885   bb = fallthru->dest;
3886 
3887   if (single_pred_p (bb))
3888     return;
3889 
3890   /* If we redirected a branch we must create new phi nodes at the
3891      start of BB.  */
3892   for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3893     {
3894       var = PHI_RESULT (phi);
3895       new_phi = create_phi_node (var, bb);
3896       SSA_NAME_DEF_STMT (var) = new_phi;
3897       SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3898       add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3899     }
3900 
3901   /* Ensure that the PHI node chain is in the same order.  */
3902   set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3903 
3904   /* Add the arguments we have stored on edges.  */
3905   FOR_EACH_EDGE (e, ei, bb->preds)
3906     {
3907       if (e == fallthru)
3908 	continue;
3909 
3910       flush_pending_stmts (e);
3911     }
3912 }
3913 
3914 
3915 /* Return a non-special label in the head of basic block BB.
3916    Create one if it doesn't exist.  */
3917 
3918 tree
3919 tree_block_label (basic_block bb)
3920 {
3921   block_stmt_iterator i, s = bsi_start (bb);
3922   bool first = true;
3923   tree label, stmt;
3924 
3925   for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
3926     {
3927       stmt = bsi_stmt (i);
3928       if (TREE_CODE (stmt) != LABEL_EXPR)
3929 	break;
3930       label = LABEL_EXPR_LABEL (stmt);
3931       if (!DECL_NONLOCAL (label))
3932 	{
3933 	  if (!first)
3934 	    bsi_move_before (&i, &s);
3935 	  return label;
3936 	}
3937     }
3938 
3939   label = create_artificial_label ();
3940   stmt = build1 (LABEL_EXPR, void_type_node, label);
3941   bsi_insert_before (&s, stmt, BSI_NEW_STMT);
3942   return label;
3943 }
3944 
3945 
3946 /* Attempt to perform edge redirection by replacing a possibly complex
3947    jump instruction by a goto or by removing the jump completely.
3948    This can apply only if all edges now point to the same block.  The
3949    parameters and return values are equivalent to
3950    redirect_edge_and_branch.  */
3951 
3952 static edge
3953 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
3954 {
3955   basic_block src = e->src;
3956   block_stmt_iterator b;
3957   tree stmt;
3958 
3959   /* We can replace or remove a complex jump only when we have exactly
3960      two edges.  */
3961   if (EDGE_COUNT (src->succs) != 2
3962       /* Verify that all targets will be TARGET.  Specifically, the
3963 	 edge that is not E must also go to TARGET.  */
3964       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
3965     return NULL;
3966 
3967   b = bsi_last (src);
3968   if (bsi_end_p (b))
3969     return NULL;
3970   stmt = bsi_stmt (b);
3971 
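  /* A COND_EXPR or SWITCH_EXPR whose destinations all collapse into TARGET
     can simply be removed; the single remaining edge becomes a fallthru.  */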
3972   if (TREE_CODE (stmt) == COND_EXPR
3973       || TREE_CODE (stmt) == SWITCH_EXPR)
3974     {
3975       bsi_remove (&b);
3976       e = ssa_redirect_edge (e, target);
3977       e->flags = EDGE_FALLTHRU;
3978       return e;
3979     }
3980 
3981   return NULL;
3982 }
3983 
3984 
3985 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
3986    edge representing the redirected branch.  */
3987 
3988 static edge
3989 tree_redirect_edge_and_branch (edge e, basic_block dest)
3990 {
3991   basic_block bb = e->src;
3992   block_stmt_iterator bsi;
3993   edge ret;
3994   tree label, stmt;
3995 
3996   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3997     return NULL;
3998 
3999   if (e->src != ENTRY_BLOCK_PTR
4000       && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4001     return ret;
4002 
4003   if (e->dest == dest)
4004     return NULL;
4005 
4006   label = tree_block_label (dest);
4007 
4008   bsi = bsi_last (bb);
4009   stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4010 
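  /* Fix up the statement that ends BB, if any, so that following edge E
     transfers control to LABEL at the start of DEST.  */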
4011   switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4012     {
4013     case COND_EXPR:
4014       stmt = (e->flags & EDGE_TRUE_VALUE
4015 	      ? COND_EXPR_THEN (stmt)
4016 	      : COND_EXPR_ELSE (stmt));
4017       GOTO_DESTINATION (stmt) = label;
4018       break;
4019 
4020     case GOTO_EXPR:
4021       /* No non-abnormal edges should lead from a non-simple goto, and
4022 	 simple ones should be represented implicitly.  */
4023       gcc_unreachable ();
4024 
4025     case SWITCH_EXPR:
4026       {
4027         tree cases = get_cases_for_edge (e, stmt);
4028 
4029 	/* If we have a list of cases associated with E, then use it
4030 	   as it's a lot faster than walking the entire case vector.  */
4031 	if (cases)
4032 	  {
4033 	    edge e2 = find_edge (e->src, dest);
4034 	    tree last, first;
4035 
4036 	    first = cases;
4037 	    while (cases)
4038 	      {
4039 		last = cases;
4040 		CASE_LABEL (cases) = label;
4041 		cases = TREE_CHAIN (cases);
4042 	      }
4043 
4044 	    /* If there was already an edge in the CFG, then we need
4045 	       to move all the cases associated with E to E2.  */
4046 	    if (e2)
4047 	      {
4048 		tree cases2 = get_cases_for_edge (e2, stmt);
4049 
4050 		TREE_CHAIN (last) = TREE_CHAIN (cases2);
4051 		TREE_CHAIN (cases2) = first;
4052 	      }
4053 	  }
4054 	else
4055 	  {
4056 	    tree vec = SWITCH_LABELS (stmt);
4057 	    size_t i, n = TREE_VEC_LENGTH (vec);
4058 
4059 	    for (i = 0; i < n; i++)
4060 	      {
4061 		tree elt = TREE_VEC_ELT (vec, i);
4062 
4063 		if (label_to_block (CASE_LABEL (elt)) == e->dest)
4064 		  CASE_LABEL (elt) = label;
4065 	      }
4066 	  }
4067 
4068 	break;
4069       }
4070 
4071     case RETURN_EXPR:
4072       bsi_remove (&bsi);
4073       e->flags |= EDGE_FALLTHRU;
4074       break;
4075 
4076     default:
4077       /* Otherwise it must be a fallthru edge, and we don't need to
4078 	 do anything besides redirecting it.  */
4079       gcc_assert (e->flags & EDGE_FALLTHRU);
4080       break;
4081     }
4082 
4083   /* Update/insert PHI nodes as necessary.  */
4084 
4085   /* Now update the edges in the CFG.  */
4086   e = ssa_redirect_edge (e, dest);
4087 
4088   return e;
4089 }
4090 
4091 
4092 /* Simple wrapper, as we can always redirect fallthru edges.  */
4093 
4094 static basic_block
4095 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4096 {
4097   e = tree_redirect_edge_and_branch (e, dest);
4098   gcc_assert (e);
4099 
4100   return NULL;
4101 }
4102 
4103 
4104 /* Splits basic block BB after statement STMT (but at least after the
4105    labels).  If STMT is NULL, BB is split just after the labels.  */
4106 
4107 static basic_block
4108 tree_split_block (basic_block bb, void *stmt)
4109 {
4110   block_stmt_iterator bsi, bsi_tgt;
4111   tree act;
4112   basic_block new_bb;
4113   edge e;
4114   edge_iterator ei;
4115 
4116   new_bb = create_empty_bb (bb);
4117 
4118   /* Redirect the outgoing edges.  */
4119   new_bb->succs = bb->succs;
4120   bb->succs = NULL;
4121   FOR_EACH_EDGE (e, ei, new_bb->succs)
4122     e->src = new_bb;
4123 
4124   if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4125     stmt = NULL;
4126 
4127   /* Position BSI just past the labels (and past STMT, if given); the statements from there on move to the new block.  */
4128   for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4129     {
4130       act = bsi_stmt (bsi);
4131       if (TREE_CODE (act) == LABEL_EXPR)
4132 	continue;
4133 
4134       if (!stmt)
4135 	break;
4136 
4137       if (stmt == act)
4138 	{
4139 	  bsi_next (&bsi);
4140 	  break;
4141 	}
4142     }
4143 
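  /* Move everything from BSI onwards to the new basic block.  */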
4144   bsi_tgt = bsi_start (new_bb);
4145   while (!bsi_end_p (bsi))
4146     {
4147       act = bsi_stmt (bsi);
4148       bsi_remove (&bsi);
4149       bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4150     }
4151 
4152   return new_bb;
4153 }
4154 
4155 
4156 /* Moves basic block BB after block AFTER.  */
4157 
4158 static bool
4159 tree_move_block_after (basic_block bb, basic_block after)
4160 {
4161   if (bb->prev_bb == after)
4162     return true;
4163 
4164   unlink_block (bb);
4165   link_block (bb, after);
4166 
4167   return true;
4168 }
4169 
4170 
4171 /* Return true if basic block BB can be duplicated.  */
4172 
4173 static bool
4174 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4175 {
4176   return true;
4177 }
4178 
4179 
4180 /* Create a duplicate of the basic block BB.  NOTE: This does not
4181    preserve SSA form.  */
4182 
4183 static basic_block
4184 tree_duplicate_bb (basic_block bb)
4185 {
4186   basic_block new_bb;
4187   block_stmt_iterator bsi, bsi_tgt;
4188   tree phi;
4189 
4190   new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4191 
4192   /* Copy the PHI nodes.  We ignore PHI node arguments here because
4193      the incoming edges have not been set up yet.  */
4194   for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4195     {
4196       tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4197       create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4198     }
4199 
4200   /* Keep the chain of PHI nodes in the same order so that they can be
4201      updated by ssa_redirect_edge.  */
4202   set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4203 
4204   bsi_tgt = bsi_start (new_bb);
4205   for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4206     {
4207       def_operand_p def_p;
4208       ssa_op_iter op_iter;
4209       tree stmt, copy;
4210       int region;
4211 
4212       stmt = bsi_stmt (bsi);
4213       if (TREE_CODE (stmt) == LABEL_EXPR)
4214 	continue;
4215 
4216       /* Create a new copy of STMT and duplicate STMT's virtual
4217 	 operands.  */
4218       copy = unshare_expr (stmt);
4219       bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4220       copy_virtual_operands (copy, stmt);
4221       region = lookup_stmt_eh_region (stmt);
4222       if (region >= 0)
4223 	add_stmt_to_eh_region (copy, region);
4224 
4225       /* Create new names for all the definitions created by COPY and
4226 	 add replacement mappings for each new name.  */
4227       FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4228 	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4229     }
4230 
4231   return new_bb;
4232 }
4233 
4234 
4235 /* Basic block BB_COPY was created by code duplication.  Add phi node
4236    arguments for edges going out of BB_COPY.  The blocks that were
4237    duplicated have BB_DUPLICATED set.  */
4238 
4239 void
4240 add_phi_args_after_copy_bb (basic_block bb_copy)
4241 {
4242   basic_block bb, dest;
4243   edge e, e_copy;
4244   edge_iterator ei;
4245   tree phi, phi_copy, phi_next, def;
4246 
4247   bb = get_bb_original (bb_copy);
4248 
4249   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4250     {
4251       if (!phi_nodes (e_copy->dest))
4252 	continue;
4253 
4254       if (e_copy->dest->flags & BB_DUPLICATED)
4255 	dest = get_bb_original (e_copy->dest);
4256       else
4257 	dest = e_copy->dest;
4258 
4259       e = find_edge (bb, dest);
4260       if (!e)
4261 	{
4262 	  /* During loop unrolling the target of the latch edge is copied.
4263 	     In this case we are not looking for the edge to DEST, but for
4264 	     the edge to the duplicated block whose original was DEST.  */
4265 	  FOR_EACH_EDGE (e, ei, bb->succs)
4266 	    if ((e->dest->flags & BB_DUPLICATED)
4267 		&& get_bb_original (e->dest) == dest)
4268 	      break;
4269 
4270 	  gcc_assert (e != NULL);
4271 	}
4272 
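      /* The PHI chains of the destination block and its copy are kept in
	 the same order, so walk them in lockstep and copy the argument
	 coming in through E onto the corresponding argument for E_COPY.  */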
4273       for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4274 	   phi;
4275 	   phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4276 	{
4277 	  phi_next = PHI_CHAIN (phi);
4278 	  def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4279 	  add_phi_arg (phi_copy, def, e_copy);
4280 	}
4281     }
4282 }
4283 
4284 /* Blocks in REGION_COPY array of length N_REGION were created by
4285    duplication of basic blocks.  Add phi node arguments for edges
4286    going from these blocks.  */
4287 
4288 void
4289 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4290 {
4291   unsigned i;
4292 
4293   for (i = 0; i < n_region; i++)
4294     region_copy[i]->flags |= BB_DUPLICATED;
4295 
4296   for (i = 0; i < n_region; i++)
4297     add_phi_args_after_copy_bb (region_copy[i]);
4298 
4299   for (i = 0; i < n_region; i++)
4300     region_copy[i]->flags &= ~BB_DUPLICATED;
4301 }
4302 
4303 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4304    important exit edge EXIT.  By important we mean that no SSA name defined
4305    inside region is live over the other exit edges of the region.  All entry
4306    edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
4307    to the duplicate of the region.  SSA form is not updated, but dominance
4308    and loop information is.  The new basic blocks are stored to REGION_COPY
4309    in the same order as they had in REGION, provided that REGION_COPY is not
4310    NULL.  The function returns false if it is unable to copy the region,
4311    true otherwise.  */
4312 
4313 bool
4314 tree_duplicate_sese_region (edge entry, edge exit,
4315 			    basic_block *region, unsigned n_region,
4316 			    basic_block *region_copy)
4317 {
4318   unsigned i, n_doms;
4319   bool free_region_copy = false, copying_header = false;
4320   struct loop *loop = entry->dest->loop_father;
4321   edge exit_copy;
4322   basic_block *doms;
4323   edge redirected;
4324   int total_freq = 0, entry_freq = 0;
4325   gcov_type total_count = 0, entry_count = 0;
4326 
4327   if (!can_copy_bbs_p (region, n_region))
4328     return false;
4329 
4330   /* Some sanity checking.  Note that we do not check for all possible
4331      misuses of the functions.  I.e. if you ask to copy something weird,
4332      it will work, but the state of structures probably will not be
4333      correct.  */
4334   for (i = 0; i < n_region; i++)
4335     {
4336       /* We do not handle subloops, i.e. all the blocks must belong to the
4337 	 same loop.  */
4338       if (region[i]->loop_father != loop)
4339 	return false;
4340 
4341       if (region[i] != entry->dest
4342 	  && region[i] == loop->header)
4343 	return false;
4344     }
4345 
4346   loop->copy = loop;
4347 
4348   /* In case the function is used for loop header copying (which is the primary
4349      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
4350   if (loop->header == entry->dest)
4351     {
4352       copying_header = true;
4353       loop->copy = loop->outer;
4354 
4355       if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4356 	return false;
4357 
4358       for (i = 0; i < n_region; i++)
4359 	if (region[i] != exit->src
4360 	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4361 	  return false;
4362     }
4363 
4364   if (!region_copy)
4365     {
4366       region_copy = xmalloc (sizeof (basic_block) * n_region);
4367       free_region_copy = true;
4368     }
4369 
4370   /* gcc_assert (!need_ssa_update_p ()); */
4371 
4372   /* Record blocks outside the region that are dominated by something
4373      inside.  */
4374   doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
4375   initialize_original_copy_tables ();
4376 
4377   n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4378 
4379   if (entry->dest->count)
4380     {
4381       total_count = entry->dest->count;
4382       entry_count = entry->count;
4383       /* Fix up corner cases, to avoid division by zero or creation of negative
4384 	 frequencies.  */
4385       if (entry_count > total_count)
4386 	entry_count = total_count;
4387     }
4388   else
4389     {
4390       total_freq = entry->dest->frequency;
4391       entry_freq = EDGE_FREQUENCY (entry);
4392       /* Fix up corner cases, to avoid division by zero or creation of negative
4393 	 frequencies.  */
4394       if (total_freq == 0)
4395 	total_freq = 1;
4396       else if (entry_freq > total_freq)
4397 	entry_freq = total_freq;
4398     }
4399 
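  /* Copy the region and split the execution counts: the copy receives the
     frequency/count entering through ENTRY, the original keeps the rest.  */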
4400   copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4401 	    split_edge_bb_loc (entry));
4402   if (total_count)
4403     {
4404       scale_bbs_frequencies_gcov_type (region, n_region,
4405 				       total_count - entry_count,
4406 				       total_count);
4407       scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4408 	  			       total_count);
4409     }
4410   else
4411     {
4412       scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4413 				 total_freq);
4414       scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4415     }
4416 
4417   if (copying_header)
4418     {
4419       loop->header = exit->dest;
4420       loop->latch = exit->src;
4421     }
4422 
4423   /* Redirect the entry and add the phi node arguments.  */
4424   redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4425   gcc_assert (redirected != NULL);
4426   flush_pending_stmts (entry);
4427 
4428   /* Concerning updating of dominators:  We must recount dominators
4429      for entry block and its copy.  Anything that is outside of the
4430      region, but was dominated by something inside needs recounting as
4431      well.  */
4432   set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4433   doms[n_doms++] = get_bb_original (entry->dest);
4434   iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4435   free (doms);
4436 
4437   /* Add the other PHI node arguments.  */
4438   add_phi_args_after_copy (region_copy, n_region);
4439 
4440   if (free_region_copy)
4441     free (region_copy);
4442 
4443   free_original_copy_tables ();
4444   return true;
4445 }
4446 
4447 
4448 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h)  */
4449 
4450 void
4451 dump_function_to_file (tree fn, FILE *file, int flags)
4452 {
4453   tree arg, vars, var;
4454   bool ignore_topmost_bind = false, any_var = false;
4455   basic_block bb;
4456   tree chain;
4457 
4458   fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4459 
4460   arg = DECL_ARGUMENTS (fn);
4461   while (arg)
4462     {
4463       print_generic_expr (file, arg, dump_flags);
4464       if (TREE_CHAIN (arg))
4465 	fprintf (file, ", ");
4466       arg = TREE_CHAIN (arg);
4467     }
4468   fprintf (file, ")\n");
4469 
4470   if (flags & TDF_DETAILS)
4471     dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
4472   if (flags & TDF_RAW)
4473     {
4474       dump_node (fn, TDF_SLIM | flags, file);
4475       return;
4476     }
4477 
4478   /* When GIMPLE is lowered, the variables are no longer available in
4479      BIND_EXPRs, so display them separately.  */
4480   if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
4481     {
4482       ignore_topmost_bind = true;
4483 
4484       fprintf (file, "{\n");
4485       for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4486 	{
4487 	  var = TREE_VALUE (vars);
4488 
4489 	  print_generic_decl (file, var, flags);
4490 	  fprintf (file, "\n");
4491 
4492 	  any_var = true;
4493 	}
4494     }
4495 
4496   if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
4497     {
4498       /* Make a CFG based dump.  */
4499       check_bb_profile (ENTRY_BLOCK_PTR, file);
4500       if (!ignore_topmost_bind)
4501 	fprintf (file, "{\n");
4502 
4503       if (any_var && n_basic_blocks)
4504 	fprintf (file, "\n");
4505 
4506       FOR_EACH_BB (bb)
4507 	dump_generic_bb (file, bb, 2, flags);
4508 
4509       fprintf (file, "}\n");
4510       check_bb_profile (EXIT_BLOCK_PTR, file);
4511     }
4512   else
4513     {
4514       int indent;
4515 
4516       /* Make a tree based dump.  */
4517       chain = DECL_SAVED_TREE (fn);
4518 
4519       if (TREE_CODE (chain) == BIND_EXPR)
4520 	{
4521 	  if (ignore_topmost_bind)
4522 	    {
4523 	      chain = BIND_EXPR_BODY (chain);
4524 	      indent = 2;
4525 	    }
4526 	  else
4527 	    indent = 0;
4528 	}
4529       else
4530 	{
4531 	  if (!ignore_topmost_bind)
4532 	    fprintf (file, "{\n");
4533 	  indent = 2;
4534 	}
4535 
4536       if (any_var)
4537 	fprintf (file, "\n");
4538 
4539       print_generic_stmt_indented (file, chain, flags, indent);
4540       if (ignore_topmost_bind)
4541 	fprintf (file, "}\n");
4542     }
4543 
4544   fprintf (file, "\n\n");
4545 }
4546 
4547 
4548 /* Pretty print the loops' intermediate representation.  */
4549 static void print_loop (FILE *, struct loop *, int);
4550 static void print_pred_bbs (FILE *, basic_block bb);
4551 static void print_succ_bbs (FILE *, basic_block bb);
4552 
4553 
4554 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
4555 
4556 static void
4557 print_pred_bbs (FILE *file, basic_block bb)
4558 {
4559   edge e;
4560   edge_iterator ei;
4561 
4562   FOR_EACH_EDGE (e, ei, bb->preds)
4563     fprintf (file, "bb_%d ", e->src->index);
4564 }
4565 
4566 
4567 /* Print on FILE the indexes for the successors of basic_block BB.  */
4568 
4569 static void
4570 print_succ_bbs (FILE *file, basic_block bb)
4571 {
4572   edge e;
4573   edge_iterator ei;
4574 
4575   FOR_EACH_EDGE (e, ei, bb->succs)
4576     fprintf (file, "bb_%d ", e->dest->index);
4577 }
4578 
4579 
4580 /* Pretty print LOOP on FILE, indented INDENT spaces.  */
4581 
4582 static void
4583 print_loop (FILE *file, struct loop *loop, int indent)
4584 {
4585   char *s_indent;
4586   basic_block bb;
4587 
4588   if (loop == NULL)
4589     return;
4590 
4591   s_indent = (char *) alloca ((size_t) indent + 1);
4592   memset ((void *) s_indent, ' ', (size_t) indent);
4593   s_indent[indent] = '\0';
4594 
4595   /* Print the loop's header.  */
4596   fprintf (file, "%sloop_%d\n", s_indent, loop->num);
4597 
4598   /* Print the loop's body.  */
4599   fprintf (file, "%s{\n", s_indent);
4600   FOR_EACH_BB (bb)
4601     if (bb->loop_father == loop)
4602       {
4603 	/* Print the basic_block's header.  */
4604 	fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
4605 	print_pred_bbs (file, bb);
4606 	fprintf (file, "}, succs = {");
4607 	print_succ_bbs (file, bb);
4608 	fprintf (file, "})\n");
4609 
4610 	/* Print the basic_block's body.  */
4611 	fprintf (file, "%s  {\n", s_indent);
4612 	tree_dump_bb (bb, file, indent + 4);
4613 	fprintf (file, "%s  }\n", s_indent);
4614       }
4615 
4616   print_loop (file, loop->inner, indent + 2);
4617   fprintf (file, "%s}\n", s_indent);
4618   print_loop (file, loop->next, indent);
4619 }
4620 
4621 
4622 /* Follow a CFG edge from the entry point of the program and, on entry
4623    to a loop, pretty print the loop structure on FILE.  */
4624 
4625 void
4626 print_loop_ir (FILE *file)
4627 {
4628   basic_block bb;
4629 
4630   bb = BASIC_BLOCK (0);
4631   if (bb && bb->loop_father)
4632     print_loop (file, bb->loop_father, 0);
4633 }
4634 
4635 
4636 /* Debugging loops structure at tree level.  */
4637 
4638 void
4639 debug_loop_ir (void)
4640 {
4641   print_loop_ir (stderr);
4642 }
4643 
4644 
4645 /* Return true if BB ends with a call, possibly followed by some
4646    instructions that must stay with the call.  Return false
4647    otherwise.  */
4648 
4649 static bool
4650 tree_block_ends_with_call_p (basic_block bb)
4651 {
4652   block_stmt_iterator bsi = bsi_last (bb);
4653   return get_call_expr_in (bsi_stmt (bsi)) != NULL;
4654 }
4655 
4656 
4657 /* Return true if BB ends with a conditional branch.  Return false
4658    otherwise.  */
4659 
4660 static bool
4661 tree_block_ends_with_condjump_p (basic_block bb)
4662 {
4663   tree stmt = last_stmt (bb);
4664   return (stmt && TREE_CODE (stmt) == COND_EXPR);
4665 }
4666 
4667 
4668 /* Return true if we need to add fake edge to exit at statement T.
4669    Helper function for tree_flow_call_edges_add.  */
4670 
4671 static bool
4672 need_fake_edge_p (tree t)
4673 {
4674   tree call;
4675 
4676   /* NORETURN and LONGJMP calls already have an edge to exit.
4677      CONST and PURE calls do not need one.
4678      We don't currently check for CONST and PURE here, although
4679      it would be a good idea, because those attributes are
4680      figured out from the RTL in mark_constant_function, and
4681      the counter incrementation code from -fprofile-arcs
4682      leads to different results from -fbranch-probabilities.  */
4683   call = get_call_expr_in (t);
4684   if (call
4685       && !(call_expr_flags (call) & ECF_NORETURN))
4686     return true;
4687 
4688   if (TREE_CODE (t) == ASM_EXPR
4689        && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
4690     return true;
4691 
4692   return false;
4693 }
4694 
4695 
4696 /* Add fake edges to the function exit for any non-constant and
4697    non-noreturn calls, and for volatile inline assembly, in the bitmap of
4698    blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.  Return
4699    the number of blocks that were split.
4700 
4701    The goal is to expose cases in which entering a basic block does
4702    not imply that all subsequent instructions must be executed.  */
4703 
4704 static int
4705 tree_flow_call_edges_add (sbitmap blocks)
4706 {
4707   int i;
4708   int blocks_split = 0;
4709   int last_bb = last_basic_block;
4710   bool check_last_block = false;
4711 
4712   if (n_basic_blocks == 0)
4713     return 0;
4714 
4715   if (! blocks)
4716     check_last_block = true;
4717   else
4718     check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4719 
4720   /* In the last basic block, before epilogue generation, there will be
4721      a fallthru edge to EXIT.  Special care is required if the last insn
4722      of the last basic block is a call because make_edge folds duplicate
4723      edges, which would result in the fallthru edge also being marked
4724      fake, which would result in the fallthru edge being removed by
4725      remove_fake_edges, which would result in an invalid CFG.
4726 
4727      Moreover, we can't elide the outgoing fake edge, since the block
4728      profiler needs to take this into account in order to solve the minimal
4729      spanning tree in the case that the call doesn't return.
4730 
4731      Handle this by adding a dummy instruction in a new last basic block.  */
4732   if (check_last_block)
4733     {
4734       basic_block bb = EXIT_BLOCK_PTR->prev_bb;
4735       block_stmt_iterator bsi = bsi_last (bb);
4736       tree t = NULL_TREE;
4737       if (!bsi_end_p (bsi))
4738 	t = bsi_stmt (bsi);
4739 
4740       if (t && need_fake_edge_p (t))
4741 	{
4742 	  edge e;
4743 
4744 	  e = find_edge (bb, EXIT_BLOCK_PTR);
4745 	  if (e)
4746 	    {
4747 	      bsi_insert_on_edge (e, build_empty_stmt ());
4748 	      bsi_commit_edge_inserts ();
4749 	    }
4750 	}
4751     }
4752 
4753   /* Now add fake edges to the function exit for any non-constant
4754      calls, since there is no way that we can determine whether they
4755      will return or not.  */
4756   for (i = 0; i < last_bb; i++)
4757     {
4758       basic_block bb = BASIC_BLOCK (i);
4759       block_stmt_iterator bsi;
4760       tree stmt, last_stmt;
4761 
4762       if (!bb)
4763 	continue;
4764 
4765       if (blocks && !TEST_BIT (blocks, i))
4766 	continue;
4767 
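      /* Walk the statements of BB backwards; add a fake edge to the exit
	 after each statement that needs one, splitting the block first
	 unless the statement is already the last one.  */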
4768       bsi = bsi_last (bb);
4769       if (!bsi_end_p (bsi))
4770 	{
4771 	  last_stmt = bsi_stmt (bsi);
4772 	  do
4773 	    {
4774 	      stmt = bsi_stmt (bsi);
4775 	      if (need_fake_edge_p (stmt))
4776 		{
4777 		  edge e;
4778 		  /* The handling above of the final block before the
4779 		     epilogue should be enough to verify that there is
4780 		     no edge to the exit block in CFG already.
4781 		     Calling make_edge in such case would cause us to
4782 		     mark that edge as fake and remove it later.  */
4783 #ifdef ENABLE_CHECKING
4784 		  if (stmt == last_stmt)
4785 		    {
4786 		      e = find_edge (bb, EXIT_BLOCK_PTR);
4787 		      gcc_assert (e == NULL);
4788 		    }
4789 #endif
4790 
4791 		  /* Note that the following may create a new basic block
4792 		     and renumber the existing basic blocks.  */
4793 		  if (stmt != last_stmt)
4794 		    {
4795 		      e = split_block (bb, stmt);
4796 		      if (e)
4797 			blocks_split++;
4798 		    }
4799 		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
4800 		}
4801 	      bsi_prev (&bsi);
4802 	    }
4803 	  while (!bsi_end_p (bsi));
4804 	}
4805     }
4806 
4807   if (blocks_split)
4808     verify_flow_info ();
4809 
4810   return blocks_split;
4811 }
4812 
4813 bool
4814 tree_purge_dead_eh_edges (basic_block bb)
4815 {
4816   bool changed = false;
4817   edge e;
4818   edge_iterator ei;
4819   tree stmt = last_stmt (bb);
4820 
4821   if (stmt && tree_can_throw_internal (stmt))
4822     return false;
4823 
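  /* The last statement of BB cannot throw internally (or BB is empty), so
     any EH edges out of BB are dead; remove them.  */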
4824   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4825     {
4826       if (e->flags & EDGE_EH)
4827 	{
4828 	  remove_edge (e);
4829 	  changed = true;
4830 	}
4831       else
4832 	ei_next (&ei);
4833     }
4834 
4835   /* Removal of dead EH edges might change dominators of not
4836      just immediate successors.  E.g. when bb1 is changed so that
4837      it no longer can throw and bb1->bb3 and bb1->bb4 are dead
4838      eh edges purged by this function in:
4839            0
4840 	  / \
4841 	 v   v
4842 	 1-->2
4843         / \  |
4844        v   v |
4845        3-->4 |
4846         \    v
4847 	 --->5
4848 	     |
4849 	     -
4850      idom(bb5) must be recomputed.  For now just free the dominance
4851      info.  */
4852   if (changed)
4853     free_dominance_info (CDI_DOMINATORS);
4854 
4855   return changed;
4856 }
4857 
4858 bool
4859 tree_purge_all_dead_eh_edges (bitmap blocks)
4860 {
4861   bool changed = false;
4862   unsigned i;
4863   bitmap_iterator bi;
4864 
4865   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
4866     {
4867       changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
4868     }
4869 
4870   return changed;
4871 }
4872 
4873 /* This function is called whenever a new edge is created or
4874    redirected.  */
4875 
4876 static void
4877 tree_execute_on_growing_pred (edge e)
4878 {
4879   basic_block bb = e->dest;
4880 
4881   if (phi_nodes (bb))
4882     reserve_phi_args_for_new_edge (bb);
4883 }
4884 
4885 /* This function is called immediately before edge E is removed from
4886    the edge vector E->dest->preds.  */
4887 
4888 static void
4889 tree_execute_on_shrinking_pred (edge e)
4890 {
4891   if (phi_nodes (e->dest))
4892     remove_phi_args (e);
4893 }
4894 
4895 /*---------------------------------------------------------------------------
4896   Helper functions for Loop versioning
4897   ---------------------------------------------------------------------------*/
4898 
4899 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
4900    of 'first'.  Both of them are dominated by 'new_head' basic block.  When
4901    'new_head' was created by splitting 'second's incoming edge, it received
4902    phi arguments on that edge from split_edge().  Later, an additional edge
4903    'e' was created to connect 'new_head' and 'first'.  This routine adds to
4904    edge 'e' the same phi args that the new_head-to-second edge received as
4905    part of the edge splitting.
4906 */
4907 
4908 static void
4909 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
4910 				basic_block new_head, edge e)
4911 {
4912   tree phi1, phi2;
4913   edge e2 = find_edge (new_head, second);
4914 
4915   /* Because NEW_HEAD has been created by splitting SECOND's incoming
4916      edge, we should always have an edge from NEW_HEAD to SECOND.  */
4917   gcc_assert (e2 != NULL);
4918 
4919   /* Browse all 'second' basic block phi nodes and add phi args to
4920      edge 'e' for 'first' head. PHI args are always in correct order.  */
4921 
4922   for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
4923        phi2 && phi1;
4924        phi2 = PHI_CHAIN (phi2),  phi1 = PHI_CHAIN (phi1))
4925     {
4926       tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
4927       add_phi_arg (phi1, def, e);
4928     }
4929 }
4930 
4931 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
4932    FIRST_HEAD is the destination of the THEN part and SECOND_HEAD is
4933    the destination of the ELSE part.  */
4934 static void
4935 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
4936                             basic_block cond_bb, void *cond_e)
4937 {
4938   block_stmt_iterator bsi;
4939   tree goto1 = NULL_TREE;
4940   tree goto2 = NULL_TREE;
4941   tree new_cond_expr = NULL_TREE;
4942   tree cond_expr = (tree) cond_e;
4943   edge e0;
4944 
4945   /* Build new conditional expr */
4946   goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
4947   goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
4948   new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
4949 
4950   /* Add new cond in cond_bb.  */
4951   bsi = bsi_start (cond_bb);
4952   bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
4953   /* Adjust edges appropriately to connect new head with first head
4954      as well as second head.  */
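  /* The existing successor edge of COND_BB no longer falls through; it
     becomes the FALSE edge of the new condition.  */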
4955   e0 = single_succ_edge (cond_bb);
4956   e0->flags &= ~EDGE_FALLTHRU;
4957   e0->flags |= EDGE_FALSE_VALUE;
4958 }
4959 
4960 struct cfg_hooks tree_cfg_hooks = {
4961   "tree",
4962   tree_verify_flow_info,
4963   tree_dump_bb,			/* dump_bb  */
4964   create_bb,			/* create_basic_block  */
4965   tree_redirect_edge_and_branch,/* redirect_edge_and_branch  */
4966   tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force  */
4967   remove_bb,			/* delete_basic_block  */
4968   tree_split_block,		/* split_block  */
4969   tree_move_block_after,	/* move_block_after  */
4970   tree_can_merge_blocks_p,	/* can_merge_blocks_p  */
4971   tree_merge_blocks,		/* merge_blocks  */
4972   tree_predict_edge,		/* predict_edge  */
4973   tree_predicted_by_p,		/* predicted_by_p  */
4974   tree_can_duplicate_bb_p,	/* can_duplicate_block_p  */
4975   tree_duplicate_bb,		/* duplicate_block  */
4976   tree_split_edge,		/* split_edge  */
4977   tree_make_forwarder_block,	/* make_forwarder_block  */
4978   NULL,				/* tidy_fallthru_edge  */
4979   tree_block_ends_with_call_p,	/* block_ends_with_call_p */
4980   tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
4981   tree_flow_call_edges_add,     /* flow_call_edges_add */
4982   tree_execute_on_growing_pred,	/* execute_on_growing_pred */
4983   tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
4984   tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
4985   tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
4986   tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
4987   extract_true_false_edges_from_block, /* extract_cond_bb_edges */
4988   flush_pending_stmts 		/* flush_pending_stmts */
4989 };
4990 
4991 
4992 /* Split all critical edges.  */
4993 
4994 static void
4995 split_critical_edges (void)
4996 {
4997   basic_block bb;
4998   edge e;
4999   edge_iterator ei;
5000 
5001   /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5002      expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
5003      mappings around the calls to split_edge.  */
5004   start_recording_case_labels ();
5005   FOR_ALL_BB (bb)
5006     {
5007       FOR_EACH_EDGE (e, ei, bb->succs)
5008 	if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5009 	  {
5010 	    split_edge (e);
5011 	  }
5012     }
5013   end_recording_case_labels ();
5014 }
5015 
5016 struct tree_opt_pass pass_split_crit_edges =
5017 {
5018   "crited",                          /* name */
5019   NULL,                          /* gate */
5020   split_critical_edges,          /* execute */
5021   NULL,                          /* sub */
5022   NULL,                          /* next */
5023   0,                             /* static_pass_number */
5024   TV_TREE_SPLIT_EDGES,           /* tv_id */
5025   PROP_cfg,                      /* properties required */
5026   PROP_no_crit_edges,            /* properties_provided */
5027   0,                             /* properties_destroyed */
5028   0,                             /* todo_flags_start */
5029   TODO_dump_func,                /* todo_flags_finish */
5030   0                              /* letter */
5031 };
5032 
5033 
5034 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5035    a temporary, make sure to register it for renaming if necessary,
5036    and finally return the temporary.  Put the statements to compute
5037    EXP before the current statement in BSI.  */
5038 
5039 tree
5040 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5041 {
5042   tree t, new_stmt, orig_stmt;
5043 
5044   if (is_gimple_val (exp))
5045     return exp;
5046 
5047   t = make_rename_temp (type, NULL);
5048   new_stmt = build (MODIFY_EXPR, type, t, exp);
5049 
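  /* Give the new statement the same source location and lexical block as
     the statement it is inserted before.  */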
5050   orig_stmt = bsi_stmt (*bsi);
5051   SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5052   TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5053 
5054   bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5055 
5056   return t;
5057 }
5058 
5059 /* Build a ternary operation and gimplify it.  Emit code before BSI.
5060    Return the gimple_val holding the result.  */
5061 
5062 tree
5063 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5064 		 tree type, tree a, tree b, tree c)
5065 {
5066   tree ret;
5067 
5068   ret = fold_build3 (code, type, a, b, c);
5069   STRIP_NOPS (ret);
5070 
5071   return gimplify_val (bsi, type, ret);
5072 }
5073 
5074 /* Build a binary operation and gimplify it.  Emit code before BSI.
5075    Return the gimple_val holding the result.  */
5076 
5077 tree
5078 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5079 		 tree type, tree a, tree b)
5080 {
5081   tree ret;
5082 
5083   ret = fold_build2 (code, type, a, b);
5084   STRIP_NOPS (ret);
5085 
5086   return gimplify_val (bsi, type, ret);
5087 }
5088 
5089 /* Build a unary operation and gimplify it.  Emit code before BSI.
5090    Return the gimple_val holding the result.  */
5091 
5092 tree
5093 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5094 		 tree a)
5095 {
5096   tree ret;
5097 
5098   ret = fold_build1 (code, type, a);
5099   STRIP_NOPS (ret);
5100 
5101   return gimplify_val (bsi, type, ret);
5102 }
5103 
5104 
5105 
5106 /* Emit return warnings.  */
5107 
5108 static void
5109 execute_warn_function_return (void)
5110 {
5111 #ifdef USE_MAPPED_LOCATION
5112   source_location location;
5113 #else
5114   location_t *locus;
5115 #endif
5116   tree last;
5117   edge e;
5118   edge_iterator ei;
5119 
5120   /* If we have a path to EXIT, then we do return.  */
5121   if (TREE_THIS_VOLATILE (cfun->decl)
5122       && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5123     {
5124 #ifdef USE_MAPPED_LOCATION
5125       location = UNKNOWN_LOCATION;
5126 #else
5127       locus = NULL;
5128 #endif
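      /* Look for an explicit RETURN_EXPR with a usable source location to
	 point the warning at.  */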
5129       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5130 	{
5131 	  last = last_stmt (e->src);
5132 	  if (TREE_CODE (last) == RETURN_EXPR
5133 #ifdef USE_MAPPED_LOCATION
5134 	      && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5135 #else
5136 	      && (locus = EXPR_LOCUS (last)) != NULL)
5137 #endif
5138 	    break;
5139 	}
5140 #ifdef USE_MAPPED_LOCATION
5141       if (location == UNKNOWN_LOCATION)
5142 	location = cfun->function_end_locus;
5143       warning (0, "%H%<noreturn%> function does return", &location);
5144 #else
5145       if (!locus)
5146 	locus = &cfun->function_end_locus;
5147       warning (0, "%H%<noreturn%> function does return", locus);
5148 #endif
5149     }
5150 
5151   /* If we see "return;" in some basic block, then we do reach the end
5152      without returning a value.  */
5153   else if (warn_return_type
5154 	   && !TREE_NO_WARNING (cfun->decl)
5155 	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5156 	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5157     {
5158       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5159 	{
5160 	  tree last = last_stmt (e->src);
5161 	  if (TREE_CODE (last) == RETURN_EXPR
5162 	      && TREE_OPERAND (last, 0) == NULL
5163 	      && !TREE_NO_WARNING (last))
5164 	    {
5165 #ifdef USE_MAPPED_LOCATION
5166 	      location = EXPR_LOCATION (last);
5167 	      if (location == UNKNOWN_LOCATION)
5168 		  location = cfun->function_end_locus;
5169 	      warning (0, "%Hcontrol reaches end of non-void function", &location);
5170 #else
5171 	      locus = EXPR_LOCUS (last);
5172 	      if (!locus)
5173 		locus = &cfun->function_end_locus;
5174 	      warning (0, "%Hcontrol reaches end of non-void function", locus);
5175 #endif
5176 	      TREE_NO_WARNING (cfun->decl) = 1;
5177 	      break;
5178 	    }
5179 	}
5180     }
5181 }
5182 
5183 
5184 /* Given a basic block B which ends with a conditional and has
5185    precisely two successors, determine which of the edges is taken if
5186    the conditional is true and which is taken if the conditional is
5187    false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
5188 
5189 void
5190 extract_true_false_edges_from_block (basic_block b,
5191 				     edge *true_edge,
5192 				     edge *false_edge)
5193 {
5194   edge e = EDGE_SUCC (b, 0);
5195 
5196   if (e->flags & EDGE_TRUE_VALUE)
5197     {
5198       *true_edge = e;
5199       *false_edge = EDGE_SUCC (b, 1);
5200     }
5201   else
5202     {
5203       *false_edge = e;
5204       *true_edge = EDGE_SUCC (b, 1);
5205     }
5206 }
5207 
5208 struct tree_opt_pass pass_warn_function_return =
5209 {
5210   NULL,					/* name */
5211   NULL,					/* gate */
5212   execute_warn_function_return,		/* execute */
5213   NULL,					/* sub */
5214   NULL,					/* next */
5215   0,					/* static_pass_number */
5216   0,					/* tv_id */
5217   PROP_cfg,				/* properties_required */
5218   0,					/* properties_provided */
5219   0,					/* properties_destroyed */
5220   0,					/* todo_flags_start */
5221   0,					/* todo_flags_finish */
5222   0					/* letter */
5223 };
5224 
5225 /* Emit noreturn warnings.  */
5226 
5227 static void
5228 execute_warn_function_noreturn (void)
5229 {
5230   if (warn_missing_noreturn
5231       && !TREE_THIS_VOLATILE (cfun->decl)
5232       && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5233       && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5234     warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5235 	     "for attribute %<noreturn%>",
5236 	     cfun->decl);
5237 }
5238 
5239 struct tree_opt_pass pass_warn_function_noreturn =
5240 {
5241   NULL,					/* name */
5242   NULL,					/* gate */
5243   execute_warn_function_noreturn,	/* execute */
5244   NULL,					/* sub */
5245   NULL,					/* next */
5246   0,					/* static_pass_number */
5247   0,					/* tv_id */
5248   PROP_cfg,				/* properties_required */
5249   0,					/* properties_provided */
5250   0,					/* properties_destroyed */
5251   0,					/* todo_flags_start */
5252   0,					/* todo_flags_finish */
5253   0					/* letter */
5254 };
5255