/* Top-level control of tree optimizations.
   Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "diagnostic.h"
#include "flags.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "function.h"
#include "langhooks.h"
#include "toplev.h"
#include "cgraph.h"
#include "tree-inline.h"
#include "tree-mudflap.h"
#include "tree-pass.h"
#include "ggc.h"
#include "graph.h"
#include "cfgloop.h"
#include "except.h"


/* Gate: execute, or not, all of the non-trivial optimizations.  */

static bool
gate_all_optimizations (void)
{
  return (optimize >= 1
	  /* Don't bother doing anything if the program has errors.  */
	  && !(errorcount || sorrycount));
}

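/* Container pass: it provides only the gate above and has no execute
   function of its own.  */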
struct tree_opt_pass pass_all_optimizations =
{
  NULL,                                 /* name */
  gate_all_optimizations,               /* gate */
  NULL,                                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

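/* Likewise a gate-only container pass for the early local passes.  */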
struct tree_opt_pass pass_early_local_passes =
{
  NULL,                                 /* name */
  gate_all_optimizations,               /* gate */
  NULL,                                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

/* Pass: clean up the CFG before the IPA passes run.  Label cleanups and
   case node grouping are left to the post-optimizing cleanup pass below,
   which runs after the tree optimizers.  */

static unsigned int
execute_cleanup_cfg_pre_ipa (void)
{
  cleanup_tree_cfg ();
  return 0;
}

struct tree_opt_pass pass_cleanup_cfg =
{
  "cleanup_cfg",                        /* name */
  NULL,                                 /* gate */
  execute_cleanup_cfg_pre_ipa,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};


/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  fold_cond_expr_cond ();
  cleanup_tree_cfg ();
  cleanup_dead_labels ();
  group_case_labels ();
  return 0;
}

struct tree_opt_pass pass_cleanup_cfg_post_optimizing =
{
  "final_cleanup",                      /* name */
  NULL,                                 /* gate */
  execute_cleanup_cfg_post_optimizing,  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

/* Pass: do the actions required to finish with tree-ssa optimization
   passes.  */

static unsigned int
execute_free_datastructures (void)
{
  /* ??? This isn't the right place for this.  Worse, it got computed
     more or less at random in various passes.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Remove the ssa structures.  Do it here since this includes statement
     annotations that need to be intact during disband_implicit_edges.  */
  delete_tree_ssa ();
  return 0;
}

struct tree_opt_pass pass_free_datastructures =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_free_datastructures,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

/* Pass: free cfg annotations.  */

static unsigned int
execute_free_cfg_annotations (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  /* Emit gotos for implicit jumps.  */
  disband_implicit_edges ();

  /* Remove annotations from every tree in the function.  */
  FOR_EACH_BB (bb)
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
        tree stmt = bsi_stmt (bsi);
        ggc_free (stmt->common.ann);
        stmt->common.ann = NULL;
      }

  /* And get rid of annotations we no longer need.  */
  delete_tree_cfg_annotations ();

#ifdef ENABLE_CHECKING
  /* Once the statement annotations have been removed, we can verify
     the integrity of statements in the EH throw table.  */
  verify_eh_throw_table_statements ();
#endif
  return 0;
}

struct tree_opt_pass pass_free_cfg_annotations =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_free_cfg_annotations,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

/* Return true if BB has at least one abnormal outgoing edge.  */

static inline bool
has_abnormal_outgoing_edge_p (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_ABNORMAL)
      return true;

  return false;
}

/* Pass: fixup_cfg.  IPA passes, compilation of earlier functions, or inlining
   might have changed some properties, such as marking functions nothrow or
   adding calls that can potentially go to non-local labels.  Remove redundant
   edges and basic blocks, and create new ones if necessary.  */

static unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

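  /* When EH data is present, look at every statement: calls now known to
     be const or pure lose their side effects, statements that can no
     longer throw are removed from the EH region table, and EH edges that
     have become dead are purged.  */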
  if (cfun->eh)
    FOR_EACH_BB (bb)
      {
        for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
          {
            tree stmt = bsi_stmt (bsi);
            tree call = get_call_expr_in (stmt);

            if (call && call_expr_flags (call) & (ECF_CONST | ECF_PURE))
              TREE_SIDE_EFFECTS (call) = 0;
            if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
              remove_stmt_from_eh_region (stmt);
          }
        tree_purge_dead_eh_edges (bb);
      }

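  /* Calls that can make an abnormal goto to a non-local label need the
     corresponding abnormal edges.  Add them, splitting the block first
     when such a call is not the last statement in it.  */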
  if (current_function_has_nonlocal_label)
    FOR_EACH_BB (bb)
      {
        for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
          {
            tree stmt = bsi_stmt (bsi);
            if (tree_can_make_abnormal_goto (stmt))
              {
                if (stmt == bsi_stmt (bsi_last (bb)))
                  {
                    if (!has_abnormal_outgoing_edge_p (bb))
                      make_abnormal_goto_edges (bb, true);
                  }
                else
                  {
                    edge e = split_block (bb, stmt);
                    bb = e->src;
                    make_abnormal_goto_edges (bb, true);
                  }
                break;
              }
          }
      }

  cleanup_tree_cfg ();

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);

  return 0;
}

struct tree_opt_pass pass_fixup_cfg =
{
  "fixupcfg",                           /* name */
  NULL,                                 /* gate */
  execute_fixup_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  */

static unsigned int
execute_init_datastructures (void)
{
  /* Allocate hash tables, arrays and other structures.  */
  init_tree_ssa ();
  return 0;
}

struct tree_opt_pass pass_init_datastructures =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_init_datastructures,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

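/* Run all of the lowering passes on FN, discard the post-dominator
   information they may have computed, and compact the basic block array.  */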
void
tree_lowering_passes (tree fn)
{
  tree saved_current_function_decl = current_function_decl;

  current_function_decl = fn;
  push_cfun (DECL_STRUCT_FUNCTION (fn));
  tree_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_pass_list (all_lowering_passes);
  free_dominance_info (CDI_POST_DOMINATORS);
  compact_blocks ();
  current_function_decl = saved_current_function_decl;
  bitmap_obstack_release (NULL);
  pop_cfun ();
}

/* Update recursively all inlined_to pointers of functions
   inlined into NODE to INLINED_TO.  */

static void
update_inlined_to_pointers (struct cgraph_node *node,
                            struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    {
      if (e->callee->global.inlined_to)
        {
          e->callee->global.inlined_to = inlined_to;
          update_inlined_to_pointers (e->callee, inlined_to);
        }
    }
}


/* For functions-as-trees languages, this performs all optimization and
   compilation for FNDECL.  */

void
tree_rest_of_compilation (tree fndecl)
{
  location_t saved_loc;
  struct cgraph_node *node;

  timevar_push (TV_EXPAND);

  gcc_assert (!flag_unit_at_a_time || cgraph_global_info_ready);

  node = cgraph_node (fndecl);

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (cgraph_preserve_function_body_p (fndecl))
    save_inline_function_body (node);

  /* Initialize the RTL code for the function.  */
  current_function_decl = fndecl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (fndecl);
  init_function_start (fndecl);

  /* Even though we're inside a function body, we still don't want to
     call expand_expr to calculate the size of a variable-sized array.
     We haven't necessarily assigned RTL to all variables yet, so it's
     not safe to try to expand expressions involving them.  */
  cfun->x_dont_save_pending_sizes_p = 1;
  cfun->after_inlining = true;
  if (flag_inline_trees)
    {
      struct cgraph_edge *e;
      for (e = node->callees; e; e = e->next_callee)
        if (!e->inline_failed || warn_inline)
          break;
      if (e)
        {
          timevar_push (TV_INTEGRATION);
          optimize_inline_calls (fndecl);
          timevar_pop (TV_INTEGRATION);
        }
    }

  /* In non-unit-at-a-time we must mark all referenced functions as needed.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;
      for (e = node->callees; e; e = e->next_callee)
        if (e->callee->analyzed)
          cgraph_mark_needed_node (e->callee);
    }

  /* We are not going to keep the cgraph edges up to date.
     Kill them so they won't confuse us.  */
  cgraph_node_remove_callees (node);


  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);
  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation.  */

  tree_register_cfg_hooks ();
  /* Perform all tree transforms and optimizations.  */
  execute_pass_list (all_passes);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  DECL_SAVED_TREE (fndecl) = NULL;
  cfun = 0;

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
          && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
          && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
                                   larger_than_size))
        {
          unsigned int size_as_int
            = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

          if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
            warning (0, "size of return value of %q+D is %u bytes",
                     fndecl, size_as_int);
          else
            warning (0, "size of return value of %q+D is larger than %wd bytes",
                     fndecl, larger_than_size);
        }
    }

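  /* When the tree inliner is not used, the body will not be needed again;
     drop the remaining references to it.  */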
  if (!flag_inline_trees)
    {
      DECL_SAVED_TREE (fndecl) = NULL;
      if (DECL_STRUCT_FUNCTION (fndecl) == 0
          && !cgraph_node (fndecl)->origin)
        {
          /* Stop pointing to the local nodes about to be freed.
             But DECL_INITIAL must remain nonzero so we know this
             was an actual function definition.
             For a nested function, this is done in c_pop_function_context.
             If rest_of_compilation set this to 0, leave it 0.  */
          if (DECL_INITIAL (fndecl) != 0)
            DECL_INITIAL (fndecl) = error_mark_node;
        }
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_EXPAND);
}