1 /* Callgraph based interprocedural optimizations.
2    Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA.  */
21 
22 /* This module implements the main driver of the compilation process as well
23    as a few basic interprocedural optimizers.
24 
25    The main purpose of this file is to act as an interface between the
26    tree-based front ends and the back end (and middle end).
27 
28    The front end is supposed to use the following functionality:
29 
30     - cgraph_finalize_function
31 
32       This function is called once the front end has parsed the whole body of the
33       function and it is certain that neither the body nor the declaration will change.
34 
35       (There is one exception, needed for implementing GCC extern inline functions.)
36 
37     - cgraph_varpool_finalize_decl
38 
39       This function has the same behavior as the above but is used for static
40       variables.
41 
42     - cgraph_finalize_compilation_unit
43 
44       This function is called once the compilation unit is finalized and it will
45       no longer change.
46 
47       In unit-at-a-time mode the call-graph construction and local function
48       analysis take place here.  Bodies of unreachable functions are released
49       to conserve memory usage.
50 
51       ???  The compilation unit in this point of view should be the compilation
52       unit as defined by the language - for instance the C front end allows
53       multiple compilation units to be parsed at once and should call this
54       function each time parsing is done, so we save memory.
55 
56     - cgraph_optimize
57 
58       In unit-at-a-time compilation the interprocedural analysis takes
59       place here.  In particular the static functions whose address is never
60       taken are marked as local.  The back end can then use this information to
61       modify calling conventions, do better inlining or similar optimizations.
62 
63     - cgraph_assemble_pending_functions
64     - cgraph_varpool_assemble_pending_decls
65 
66       In non-unit-at-a-time mode these functions can be used to force compilation
67       of functions or variables that are known to be needed at a given stage
68       of compilation.
69 
70     - cgraph_mark_needed_node
71     - cgraph_varpool_mark_needed_node
72 
73       When a function or variable is referenced in some hidden way (for instance
74       via assembly code and marked by the attribute "used"), the call-graph data
75       structure must be updated accordingly by these functions.
76 
77     - analyze_expr callback
78 
79       This callback is responsible for lowering tree nodes not understood by
80       generic code into understandable ones, or alternatively for marking the
81       callgraph and varpool nodes referenced by the expression as needed.
82 
83       ??? On tree-ssa, genericizing should take place here and we would avoid
84       the need for these hooks (replacing them by a genericizing hook).
85 
86     - expand_function callback
87 
88       This callback is used to expand a function and pass it to the RTL back end.
89       The front end should not make any assumptions about when this function can be
90       called.  In particular cgraph_assemble_pending_functions,
91       cgraph_varpool_assemble_pending_decls, cgraph_finalize_function,
92       cgraph_varpool_finalize_decl and cgraph_optimize can cause arbitrary
93       previously finalized functions to be expanded.
94 
95     We implement two compilation modes.
96 
97       - unit-at-a-time:  In this mode the analysis of all functions is deferred
98 	to cgraph_finalize_compilation_unit and their expansion to cgraph_optimize.
99 
100 	In cgraph_finalize_compilation_unit the reachable functions are
101 	analyzed.  During analysis the call-graph edges from reachable
102 	functions are constructed and their destinations are marked as
103 	reachable.  References to functions and variables are discovered too
104 	and variables found to be needed are output to the assembly file.  Via
105 	the mark_referenced call in assemble_variable, functions referenced by
106 	static variables are noticed too.
107 
108 	The intra-procedural information is produced and its existence
109 	indicated by global_info_ready.  Once this flag is set it is impossible
110 	to change a function from !reachable to reachable, and thus
111 	assemble_variable no longer calls mark_referenced.
112 
113 	Finally the call-graph is topologically sorted and all reachable functions
114 	that have not been completely inlined or are not external are output.
115 
116 	??? It is possible that a reference to a function or variable is optimized
117 	out.  We cannot deal with this nicely because the topological order is not
118 	suitable for it.  For tree-ssa we may consider another pass doing
119 	optimization and re-discovering reachable functions.
120 
121 	??? Reorganize the code so variables are output very last and only if they
122 	really have been referenced by the produced code, so we catch more cases
123 	where a reference has been optimized out.
124 
125       - non-unit-at-a-time
126 
127 	All functions and variables are output as early as possible to conserve
128 	memory consumption.  This may or may not result in less memory used, but
129 	it is still needed for some legacy code that relies on a particular ordering
130 	of things output from the compiler.
131 
132 	Varpool data structures are not used and variables are output directly.
133 
134 	Functions are output early using a call to
135 	cgraph_assemble_pending_functions from cgraph_finalize_function.  The
136 	decision on whether a function is needed is made more conservatively, so
137 	uninlinable static functions are needed too.  During the call-graph
138 	construction the edge destinations are not marked as reachable and it
139 	is left entirely to assemble_variable to mark them.  */
140 
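/* As an illustration (a hedged sketch, not part of GCC itself), a front end
   driving this interface in unit-at-a-time mode is expected to do roughly the
   following; frontend_parse_next_function is a hypothetical stand-in for the
   language-specific parsing loop:

	tree fndecl;

	while ((fndecl = frontend_parse_next_function ()) != NULL_TREE)
	  cgraph_finalize_function (fndecl, false);

	cgraph_finalize_compilation_unit ();
	cgraph_optimize ();

   cgraph_finalize_compilation_unit builds and analyzes the callgraph, and
   cgraph_optimize then makes the interprocedural decisions and expands the
   surviving function bodies.  */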
141 
142 #include "config.h"
143 #include "system.h"
144 #include "coretypes.h"
145 #include "tm.h"
146 #include "tree.h"
147 #include "rtl.h"
148 #include "tree-flow.h"
149 #include "tree-inline.h"
150 #include "langhooks.h"
151 #include "pointer-set.h"
152 #include "toplev.h"
153 #include "flags.h"
154 #include "ggc.h"
155 #include "debug.h"
156 #include "target.h"
157 #include "cgraph.h"
158 #include "diagnostic.h"
159 #include "timevar.h"
160 #include "params.h"
161 #include "fibheap.h"
162 #include "c-common.h"
163 #include "intl.h"
164 #include "function.h"
165 #include "ipa-prop.h"
166 #include "tree-gimple.h"
167 #include "tree-pass.h"
168 #include "output.h"
169 
170 static void cgraph_expand_all_functions (void);
171 static void cgraph_mark_functions_to_output (void);
172 static void cgraph_expand_function (struct cgraph_node *);
173 static tree record_reference (tree *, int *, void *);
174 static void cgraph_output_pending_asms (void);
175 static void cgraph_increase_alignment (void);
176 
177 /* Lists all assembled variables to be sent to debugger output later on.  */
178 static GTY(()) struct cgraph_varpool_node *cgraph_varpool_assembled_nodes_queue;
179 
180 /* Records tree nodes seen in record_reference.  Simply using
181    walk_tree_without_duplicates doesn't guarantee each node is visited
182    once because it gets a new htab upon each recursive call from
183    record_reference itself.  */
184 static struct pointer_set_t *visited_nodes;
185 
186 static FILE *cgraph_dump_file;
187 
188 /* Determine if function DECL is needed.  That is, visible to something
189    either outside this translation unit, something magic in the system
190    configury, or (if not doing unit-at-a-time) to something we haven't
191    seen yet.  */
192 
193 static bool
194 decide_is_function_needed (struct cgraph_node *node, tree decl)
195 {
196   tree origin;
197   if (MAIN_NAME_P (DECL_NAME (decl))
198       && TREE_PUBLIC (decl))
199     {
200       node->local.externally_visible = true;
201       return true;
202     }
203 
204   /* If the user told us it is used, then it must be so.  */
205   if (node->local.externally_visible)
206     return true;
207 
208   if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
209     return true;
210 
211   /* ??? If the assembler name is set by hand, it is possible to assemble
212      the name later after finalizing the function and the fact is noticed
213      in assemble_name then.  This is arguably a bug.  */
214   if (DECL_ASSEMBLER_NAME_SET_P (decl)
215       && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
216     return true;
217 
218   /* If we decided it was needed before, but at the time we didn't have
219      the body of the function available, then it's still needed.  We have
220      to go back and re-check its dependencies now.  */
221   if (node->needed)
222     return true;
223 
224   /* Externally visible functions must be output.  The exception is
225      COMDAT functions that must be output only when they are needed.
226 
227      When not optimizing, also output the static functions (see
228      PR24561), but don't do so for always_inline functions, functions
229      declared inline and nested functions.  These were optimized out
230      in the original implementation and it is unclear whether we want
231      to change the behavior here.  */
232   if (((TREE_PUBLIC (decl)
233 	|| (!optimize && !node->local.disregard_inline_limits
234 	    && !DECL_DECLARED_INLINE_P (decl)
235 	    && !node->origin))
236       && !flag_whole_program)
237       && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
238     return true;
239 
240   /* Constructors and destructors are reachable from the runtime by
241      some mechanism.  */
242   if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
243     return true;
244 
245   if (flag_unit_at_a_time)
246     return false;
247 
248   /* If not doing unit at a time, then we'll only defer this function
249      if it's marked for inlining.  Otherwise we want to emit it now.  */
250 
251   /* "extern inline" functions are never output locally.  */
252   if (DECL_EXTERNAL (decl))
253     return false;
254   /* Nested functions of an extern inline function shall not be emitted unless
255      we inlined the origin.  */
256   for (origin = decl_function_context (decl); origin;
257        origin = decl_function_context (origin))
258     if (DECL_EXTERNAL (origin))
259       return false;
260   /* We want to emit COMDAT functions only when absolutely necessary.  */
261   if (DECL_COMDAT (decl))
262     return false;
263   if (!DECL_INLINE (decl)
264       || (!node->local.disregard_inline_limits
265 	  /* When declared inline, defer even the uninlinable functions.
266 	     This allows them to be eliminated when unused.  */
267 	  && !DECL_DECLARED_INLINE_P (decl)
268 	  && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
269     return true;
270 
271   return false;
272 }
273 
274 /* Walk the decls we marked as necessary and see if they reference new
275    variables or functions and add them into the worklists.  */
276 static bool
277 cgraph_varpool_analyze_pending_decls (void)
278 {
279   bool changed = false;
280   timevar_push (TV_CGRAPH);
281 
282   while (cgraph_varpool_first_unanalyzed_node)
283     {
284       tree decl = cgraph_varpool_first_unanalyzed_node->decl;
285 
286       cgraph_varpool_first_unanalyzed_node->analyzed = true;
287 
288       cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
289 
290       /* Compute the alignment early so function body expanders are
291 	 already informed about increased alignment.  */
292       align_variable (decl, 0);
293 
294       if (DECL_INITIAL (decl))
295 	{
296 	  visited_nodes = pointer_set_create ();
297 	  walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
298 	  pointer_set_destroy (visited_nodes);
299 	  visited_nodes = NULL;
300 	}
301       changed = true;
302     }
303   timevar_pop (TV_CGRAPH);
304   return changed;
305 }
306 
307 /* Optimization of function bodies might have rendered some variables
308    unnecessary, so we want to avoid compiling them.
309 
310    This is done by pruning the queue and keeping only the variables that
311    really appear to be needed (i.e. they are either externally visible or
312    referenced by a compiled function).  Re-doing the reachability analysis on
313    variables brings back the remaining variables referenced by these.  */
314 static void
315 cgraph_varpool_remove_unreferenced_decls (void)
316 {
317   struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
318 
319   cgraph_varpool_reset_queue ();
320 
321   if (errorcount || sorrycount)
322     return;
323 
324   while (node)
325     {
326       tree decl = node->decl;
327       next = node->next_needed;
328       node->needed = 0;
329 
330       if (node->finalized
331 	  && ((DECL_ASSEMBLER_NAME_SET_P (decl)
332 	       && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
333 	      || node->force_output
334 	      || decide_is_variable_needed (node, decl)
335 	      /* ??? Cgraph does not yet rule the world with an iron hand,
336 		 and does not control the emission of debug information.
337 		 After a variable has its DECL_RTL set, we must assume that
338 		 it may be referenced by the debug information, and we can
339 		 no longer elide it.  */
340 	      || DECL_RTL_SET_P (decl)))
341 	cgraph_varpool_mark_needed_node (node);
342 
343       node = next;
344     }
345   /* Make sure we mark alias targets as used targets.  */
346   finish_aliases_1 ();
347   cgraph_varpool_analyze_pending_decls ();
348 }
349 
350 
351 /* When not doing unit-at-a-time, output all functions enqueued.
352    Return true when any such functions were found.  */
353 
354 bool
355 cgraph_assemble_pending_functions (void)
356 {
357   bool output = false;
358 
359   if (flag_unit_at_a_time)
360     return false;
361 
362   cgraph_output_pending_asms ();
363 
364   while (cgraph_nodes_queue)
365     {
366       struct cgraph_node *n = cgraph_nodes_queue;
367 
368       cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
369       n->next_needed = NULL;
370       if (!n->global.inlined_to
371 	  && !n->alias
372 	  && !DECL_EXTERNAL (n->decl))
373 	{
374 	  cgraph_expand_function (n);
375 	  output = true;
376 	}
377     }
378 
379   /* Process CGRAPH_EXPAND_QUEUE; these are functions created during
380      the expansion process.  Note that this queue may grow as it is
381      being processed, as the new functions may generate new ones.  */
382   while (cgraph_expand_queue)
383     {
384       struct cgraph_node *n = cgraph_expand_queue;
385       cgraph_expand_queue = cgraph_expand_queue->next_needed;
386       n->next_needed = NULL;
387       cgraph_finalize_function (n->decl, false);
388       output = true;
389     }
390 
391   return output;
392 }
393 
394 
395 /* As a GCC extension we allow redefinition of the function.  The
396    semantics when the two bodies differ are not well defined.
397    We replace the old body with the new body, so in unit-at-a-time mode
398    we always use the new body, while in normal mode we may end up with
399    the old body inlined into some functions and the new body expanded and
400    inlined in others.
401 
402    ??? It may make more sense to use one body for inlining and the other
403    body for expanding the function, but this is difficult to do.  */
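
/* A user-level illustration of the extension described above (an informal
   sketch, not taken from the GCC testsuite), using GNU89 "extern inline"
   semantics:

	extern inline int twice (int x) { return 2 * x; }
	int twice (int x) { return x + x; }

   The second definition replaces the first one in the callgraph; outside
   unit-at-a-time mode, calls expanded before the redefinition may still have
   used the first body.  */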
404 
405 static void
406 cgraph_reset_node (struct cgraph_node *node)
407 {
408   /* If node->output is set, then this is a unit-at-a-time compilation
409      and we have already begun whole-unit analysis.  This is *not*
410      testing for whether we've already emitted the function.  That
411      case can be sort-of legitimately seen with real function
412      redefinition errors.  I would argue that the front end should
413      never present us with such a case, but don't enforce that for now.  */
414   gcc_assert (!node->output);
415 
416   /* Reset our data structures so we can analyze the function again.  */
417   memset (&node->local, 0, sizeof (node->local));
418   memset (&node->global, 0, sizeof (node->global));
419   memset (&node->rtl, 0, sizeof (node->rtl));
420   node->analyzed = false;
421   node->local.redefined_extern_inline = true;
422   node->local.finalized = false;
423 
424   if (!flag_unit_at_a_time)
425     {
426       struct cgraph_node *n, *next;
427 
428       for (n = cgraph_nodes; n; n = next)
429 	{
430 	  next = n->next;
431 	  if (n->global.inlined_to == node)
432 	    cgraph_remove_node (n);
433 	}
434     }
435 
436   cgraph_node_remove_callees (node);
437 
438   /* We may need to re-queue the node for assembling in case
439      we already processed it and ignored it as not needed.  */
440   if (node->reachable && !flag_unit_at_a_time)
441     {
442       struct cgraph_node *n;
443 
444       for (n = cgraph_nodes_queue; n; n = n->next_needed)
445 	if (n == node)
446 	  break;
447       if (!n)
448 	node->reachable = 0;
449     }
450 }
451 
452 static void
453 cgraph_lower_function (struct cgraph_node *node)
454 {
455   if (node->lowered)
456     return;
457   tree_lowering_passes (node->decl);
458   node->lowered = true;
459 }
460 
461 /* DECL has been parsed.  Take it, queue it, compile it at the whim of the
462    logic in effect.  If NESTED is true, then our caller cannot stand to have
463    the garbage collector run at the moment.  We would need to either create
464    a new GC context, or just not compile right now.  */
465 
466 void
467 cgraph_finalize_function (tree decl, bool nested)
468 {
469   struct cgraph_node *node = cgraph_node (decl);
470 
471   if (node->local.finalized)
472     cgraph_reset_node (node);
473 
474   notice_global_symbol (decl);
475   node->decl = decl;
476   node->local.finalized = true;
477   node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
478   if (node->nested)
479     lower_nested_functions (decl);
480   gcc_assert (!node->nested);
481 
482   /* If not unit at a time, then we need to create the call graph
483      now, so that called functions can be queued and emitted now.  */
484   if (!flag_unit_at_a_time)
485     {
486       cgraph_analyze_function (node);
487       cgraph_decide_inlining_incrementally (node, false);
488     }
489 
490   if (decide_is_function_needed (node, decl))
491     cgraph_mark_needed_node (node);
492 
493   /* Since we reclaim unreachable nodes at the end of every language
494      level unit, we need to be conservative about possible entry points
495      there.  */
496   if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
497     cgraph_mark_reachable_node (node);
498 
499   /* If not unit at a time, go ahead and emit everything we've found
500      to be reachable at this time.  */
501   if (!nested)
502     {
503       if (!cgraph_assemble_pending_functions ())
504 	ggc_collect ();
505     }
506 
507   /* If we've not yet emitted decl, tell the debug info about it.  */
508   if (!TREE_ASM_WRITTEN (decl))
509     (*debug_hooks->deferred_inline_function) (decl);
510 
511   /* Possibly warn about unused parameters.  */
512   if (warn_unused_parameter)
513     do_warn_unused_parameter (decl);
514 }
515 
516 /* Walk tree and record all calls.  Called via walk_tree.  */
517 static tree
518 record_reference (tree *tp, int *walk_subtrees, void *data)
519 {
520   tree t = *tp;
521 
522   switch (TREE_CODE (t))
523     {
524     case VAR_DECL:
525       /* ??? Really, we should mark this decl as *potentially* referenced
526 	 by this function and re-examine whether the decl is actually used
527 	 after rtl has been generated.  */
528       if (TREE_STATIC (t) || DECL_EXTERNAL (t))
529 	{
530 	  cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
531 	  if (lang_hooks.callgraph.analyze_expr)
532 	    return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
533 						      data);
534 	}
535       break;
536 
537     case FDESC_EXPR:
538     case ADDR_EXPR:
539       if (flag_unit_at_a_time)
540 	{
541 	  /* Record dereferences to the functions.  This makes the
542 	     functions reachable unconditionally.  */
543 	  tree decl = TREE_OPERAND (*tp, 0);
544 	  if (TREE_CODE (decl) == FUNCTION_DECL)
545 	    cgraph_mark_needed_node (cgraph_node (decl));
546 	}
547       break;
548 
549     default:
550       /* Save some cycles by not walking types and declarations as we
551 	 won't find anything useful there anyway.  */
552       if (IS_TYPE_OR_DECL_P (*tp))
553 	{
554 	  *walk_subtrees = 0;
555 	  break;
556 	}
557 
558       if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
559 	return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
560       break;
561     }
562 
563   return NULL;
564 }
565 
566 /* Create cgraph edges for function calls inside BODY from NODE.  */
567 
568 static void
569 cgraph_create_edges (struct cgraph_node *node, tree body)
570 {
571   basic_block bb;
572 
573   struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
574   block_stmt_iterator bsi;
575   tree step;
576   visited_nodes = pointer_set_create ();
577 
578   /* Reach the trees by walking over the CFG, and note the
579      enclosing basic-blocks in the call edges.  */
580   FOR_EACH_BB_FN (bb, this_cfun)
581     for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
582       {
583 	tree stmt = bsi_stmt (bsi);
584 	tree call = get_call_expr_in (stmt);
585 	tree decl;
586 
587 	if (call && (decl = get_callee_fndecl (call)))
588 	  {
589 	    cgraph_create_edge (node, cgraph_node (decl), stmt,
590 				bb->count,
591 				bb->loop_depth);
592 	    walk_tree (&TREE_OPERAND (call, 1),
593 		       record_reference, node, visited_nodes);
594 	    if (TREE_CODE (stmt) == MODIFY_EXPR)
595 	      walk_tree (&TREE_OPERAND (stmt, 0),
596 			 record_reference, node, visited_nodes);
597 	  }
598 	else
599 	  walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
600       }
601 
602   /* Look for initializers of constant variables and private statics.  */
603   for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
604        step;
605        step = TREE_CHAIN (step))
606     {
607       tree decl = TREE_VALUE (step);
608       if (TREE_CODE (decl) == VAR_DECL
609 	  && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
610 	  && flag_unit_at_a_time)
611 	cgraph_varpool_finalize_decl (decl);
612       else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
613 	walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
614     }
615 
616   pointer_set_destroy (visited_nodes);
617   visited_nodes = NULL;
618 }
619 
620 /* Give the initial reasons why inlining would fail.  These get
621    either NULLified or, usually, overwritten by a more precise reason
622    later.  */
623 static void
624 initialize_inline_failed (struct cgraph_node *node)
625 {
626   struct cgraph_edge *e;
627 
628   for (e = node->callers; e; e = e->next_caller)
629     {
630       gcc_assert (!e->callee->global.inlined_to);
631       gcc_assert (e->inline_failed);
632       if (node->local.redefined_extern_inline)
633 	e->inline_failed = N_("redefined extern inline functions are not "
634 			   "considered for inlining");
635       else if (!node->local.inlinable)
636 	e->inline_failed = N_("function not inlinable");
637       else
638 	e->inline_failed = N_("function not considered for inlining");
639     }
640 }
641 
642 /* Rebuild call edges from the current function after passes not aware
643    of cgraph updating.  */
644 static unsigned int
645 rebuild_cgraph_edges (void)
646 {
647   basic_block bb;
648   struct cgraph_node *node = cgraph_node (current_function_decl);
649   block_stmt_iterator bsi;
650 
651   cgraph_node_remove_callees (node);
652 
653   node->count = ENTRY_BLOCK_PTR->count;
654 
655   FOR_EACH_BB (bb)
656     for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
657       {
658 	tree stmt = bsi_stmt (bsi);
659 	tree call = get_call_expr_in (stmt);
660 	tree decl;
661 
662 	if (call && (decl = get_callee_fndecl (call)))
663 	  cgraph_create_edge (node, cgraph_node (decl), stmt,
664 			      bb->count,
665 			      bb->loop_depth);
666       }
667   initialize_inline_failed (node);
668   gcc_assert (!node->global.inlined_to);
669   return 0;
670 }
671 
672 struct tree_opt_pass pass_rebuild_cgraph_edges =
673 {
674   NULL,					/* name */
675   NULL,					/* gate */
676   rebuild_cgraph_edges,			/* execute */
677   NULL,					/* sub */
678   NULL,					/* next */
679   0,					/* static_pass_number */
680   0,					/* tv_id */
681   PROP_cfg,				/* properties_required */
682   0,					/* properties_provided */
683   0,					/* properties_destroyed */
684   0,					/* todo_flags_start */
685   0,					/* todo_flags_finish */
686   0					/* letter */
687 };
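
/* A hedged sketch of how a pass structure like the one above is usually
   scheduled; the exact place in the pass list lives in passes.c and is an
   assumption here, not something this file controls:

	NEXT_PASS (pass_rebuild_cgraph_edges);

   Any pass that rewrites call statements without keeping the callgraph in
   sync can be followed by this pass to bring the edges back up to date.  */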
688 
689 /* Verify the consistency of the given cgraph node.  */
690 void
691 verify_cgraph_node (struct cgraph_node *node)
692 {
693   struct cgraph_edge *e;
694   struct cgraph_node *main_clone;
695   struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
696   basic_block this_block;
697   block_stmt_iterator bsi;
698   bool error_found = false;
699 
700   if (errorcount || sorrycount)
701     return;
702 
703   timevar_push (TV_CGRAPH_VERIFY);
704   for (e = node->callees; e; e = e->next_callee)
705     if (e->aux)
706       {
707 	error ("aux field set for edge %s->%s",
708 	       cgraph_node_name (e->caller), cgraph_node_name (e->callee));
709 	error_found = true;
710       }
711   if (node->count < 0)
712     {
713       error ("Execution count is negative");
714       error_found = true;
715     }
716   for (e = node->callers; e; e = e->next_caller)
717     {
718       if (e->count < 0)
719 	{
720 	  error ("caller edge count is negative");
721 	  error_found = true;
722 	}
723       if (!e->inline_failed)
724 	{
725 	  if (node->global.inlined_to
726 	      != (e->caller->global.inlined_to
727 		  ? e->caller->global.inlined_to : e->caller))
728 	    {
729 	      error ("inlined_to pointer is wrong");
730 	      error_found = true;
731 	    }
732 	  if (node->callers->next_caller)
733 	    {
734 	      error ("multiple inline callers");
735 	      error_found = true;
736 	    }
737 	}
738       else
739 	if (node->global.inlined_to)
740 	  {
741 	    error ("inlined_to pointer set for noninline callers");
742 	    error_found = true;
743 	  }
744     }
745   if (!node->callers && node->global.inlined_to)
746     {
747       error ("inlined_to pointer is set but no predecessors found");
748       error_found = true;
749     }
750   if (node->global.inlined_to == node)
751     {
752       error ("inlined_to pointer refers to itself");
753       error_found = true;
754     }
755 
756   for (main_clone = cgraph_node (node->decl); main_clone;
757        main_clone = main_clone->next_clone)
758     if (main_clone == node)
759       break;
760   if (!cgraph_node (node->decl))
761     {
762       error ("node not found in cgraph_hash");
763       error_found = true;
764     }
765 
766   if (node->analyzed
767       && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
768       && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
769     {
770       if (this_cfun->cfg)
771 	{
772 	  /* The nodes we're interested in are never shared, so walk
773 	     the tree ignoring duplicates.  */
774 	  visited_nodes = pointer_set_create ();
775 	  /* Reach the trees by walking over the CFG, and note the
776 	     enclosing basic-blocks in the call edges.  */
777 	  FOR_EACH_BB_FN (this_block, this_cfun)
778 	    for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
779 	      {
780 		tree stmt = bsi_stmt (bsi);
781 		tree call = get_call_expr_in (stmt);
782 		tree decl;
783 		if (call && (decl = get_callee_fndecl (call)))
784 		  {
785 		    struct cgraph_edge *e = cgraph_edge (node, stmt);
786 		    if (e)
787 		      {
788 			if (e->aux)
789 			  {
790 			    error ("shared call_stmt:");
791 			    debug_generic_stmt (stmt);
792 			    error_found = true;
793 			  }
794 			if (e->callee->decl != cgraph_node (decl)->decl
795 			    && e->inline_failed)
796 			  {
797 			    error ("edge points to wrong declaration:");
798 			    debug_tree (e->callee->decl);
799 			    fprintf (stderr," Instead of:");
800 			    debug_tree (decl);
801 			  }
802 			e->aux = (void *)1;
803 		      }
804 		    else
805 		      {
806 			error ("missing callgraph edge for call stmt:");
807 			debug_generic_stmt (stmt);
808 			error_found = true;
809 		      }
810 		  }
811 	      }
812 	  pointer_set_destroy (visited_nodes);
813 	  visited_nodes = NULL;
814 	}
815       else
816 	/* No CFG available?!  */
817 	gcc_unreachable ();
818 
819       for (e = node->callees; e; e = e->next_callee)
820 	{
821 	  if (!e->aux)
822 	    {
823 	      error ("edge %s->%s has no corresponding call_stmt",
824 		     cgraph_node_name (e->caller),
825 		     cgraph_node_name (e->callee));
826 	      debug_generic_stmt (e->call_stmt);
827 	      error_found = true;
828 	    }
829 	  e->aux = 0;
830 	}
831     }
832   if (error_found)
833     {
834       dump_cgraph_node (stderr, node);
835       internal_error ("verify_cgraph_node failed");
836     }
837   timevar_pop (TV_CGRAPH_VERIFY);
838 }
839 
840 /* Verify whole cgraph structure.  */
841 void
842 verify_cgraph (void)
843 {
844   struct cgraph_node *node;
845 
846   if (sorrycount || errorcount)
847     return;
848 
849   for (node = cgraph_nodes; node; node = node->next)
850     verify_cgraph_node (node);
851 }
852 
853 /* Output one variable, if necessary.  Return whether we output it.  */
854 static bool
855 cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
856 {
857   tree decl = node->decl;
858 
859   if (!TREE_ASM_WRITTEN (decl)
860       && !node->alias
861       && !DECL_EXTERNAL (decl)
862       && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
863     {
864       assemble_variable (decl, 0, 1, 0);
865       return TREE_ASM_WRITTEN (decl);
866     }
867 
868   return false;
869 }
870 
871 /* Output all variables enqueued to be assembled.  */
872 bool
873 cgraph_varpool_assemble_pending_decls (void)
874 {
875   bool changed = false;
876 
877   if (errorcount || sorrycount)
878     return false;
879 
880   /* EH might mark decls as needed during expansion.  This should be safe since
881      we don't create references to new functions, but it should not be used
882      elsewhere.  */
883   cgraph_varpool_analyze_pending_decls ();
884 
885   while (cgraph_varpool_nodes_queue)
886     {
887       struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
888 
889       cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
890       if (cgraph_varpool_assemble_decl (node))
891 	{
892 	  changed = true;
893 	  node->next_needed = cgraph_varpool_assembled_nodes_queue;
894 	  cgraph_varpool_assembled_nodes_queue = node;
895 	  node->finalized = 1;
896 	}
897       else
898         node->next_needed = NULL;
899     }
900   /* cgraph_varpool_nodes_queue is now empty, clear the pointer to the last
901      element in the queue.  */
902   cgraph_varpool_last_needed_node = NULL;
903   return changed;
904 }
905 /* Output debug information for the variables already assembled.  */
906 static void
907 cgraph_varpool_output_debug_info (void)
908 {
909   timevar_push (TV_SYMOUT);
910   if (errorcount == 0 && sorrycount == 0)
911     while (cgraph_varpool_assembled_nodes_queue)
912       {
913 	struct cgraph_varpool_node *node = cgraph_varpool_assembled_nodes_queue;
914 
915 	/* Local static variables are never seen by check_global_declarations
916 	   so we need to output debug info by hand.  */
917 	if (DECL_CONTEXT (node->decl)
918 	    && (TREE_CODE (DECL_CONTEXT (node->decl)) == BLOCK
919 		|| TREE_CODE (DECL_CONTEXT (node->decl)) == FUNCTION_DECL)
920 	    && errorcount == 0 && sorrycount == 0)
921 	     (*debug_hooks->global_decl) (node->decl);
922 	cgraph_varpool_assembled_nodes_queue = node->next_needed;
923 	node->next_needed = 0;
924       }
925   timevar_pop (TV_SYMOUT);
926 }
927 
928 /* Output all asm statements we have stored up to be output.  */
929 
930 static void
931 cgraph_output_pending_asms (void)
932 {
933   struct cgraph_asm_node *can;
934 
935   if (errorcount || sorrycount)
936     return;
937 
938   for (can = cgraph_asm_nodes; can; can = can->next)
939     assemble_asm (can->asm_str);
940   cgraph_asm_nodes = NULL;
941 }
942 
943 /* Analyze the function scheduled to be output.  */
944 void
945 cgraph_analyze_function (struct cgraph_node *node)
946 {
947   tree decl = node->decl;
948 
949   current_function_decl = decl;
950   push_cfun (DECL_STRUCT_FUNCTION (decl));
951   cgraph_lower_function (node);
952 
953   /* First kill forward declaration so reverse inlining works properly.  */
954   cgraph_create_edges (node, decl);
955 
956   node->local.inlinable = tree_inlinable_function_p (decl);
957   if (!flag_unit_at_a_time)
958     node->local.self_insns = estimate_num_insns (decl);
959   if (node->local.inlinable)
960     node->local.disregard_inline_limits
961       = lang_hooks.tree_inlining.disregard_inline_limits (decl);
962   initialize_inline_failed (node);
963   if (flag_really_no_inline && !node->local.disregard_inline_limits)
964     node->local.inlinable = 0;
965   /* Inlining characteristics are maintained by the cgraph_mark_inline.  */
966   node->global.insns = node->local.self_insns;
967 
968   node->analyzed = true;
969   pop_cfun ();
970   current_function_decl = NULL;
971 }
972 
973 /* Look for externally_visible and used attributes and mark cgraph nodes
974    accordingly.
975 
976    We cannot mark the nodes at the point the attributes are processed (in
977    handle_*_attribute) because the copy of the declarations available at that
978    point may not be canonical.  For example, in:
979 
980     void f();
981     void f() __attribute__((used));
982 
983    the declaration we see in handle_used_attribute will be the second
984    declaration -- but the front end will subsequently merge that declaration
985    with the original declaration and discard the second declaration.
986 
987    Furthermore, we can't mark these nodes in cgraph_finalize_function because:
988 
989     void f() {}
990     void f() __attribute__((externally_visible));
991 
992    is valid.
993 
994    So, we walk the nodes at the end of the translation unit, applying the
995    attributes at that point.  */
996 
997 static void
998 process_function_and_variable_attributes (struct cgraph_node *first,
999                                           struct cgraph_varpool_node *first_var)
1000 {
1001   struct cgraph_node *node;
1002   struct cgraph_varpool_node *vnode;
1003 
1004   for (node = cgraph_nodes; node != first; node = node->next)
1005     {
1006       tree decl = node->decl;
1007       if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
1008 	{
1009 	  mark_decl_referenced (decl);
1010 	  if (node->local.finalized)
1011 	     cgraph_mark_needed_node (node);
1012 	}
1013       if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
1014 	{
1015 	  if (! TREE_PUBLIC (node->decl))
1016 	    warning (OPT_Wattributes,
1017 		     "%J%<externally_visible%> attribute has effect only on public objects",
1018 		     node->decl);
1019 	  else
1020 	    {
1021 	      if (node->local.finalized)
1022 		cgraph_mark_needed_node (node);
1023 	      node->local.externally_visible = true;
1024 	    }
1025 	}
1026     }
1027   for (vnode = cgraph_varpool_nodes; vnode != first_var; vnode = vnode->next)
1028     {
1029       tree decl = vnode->decl;
1030       if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
1031 	{
1032 	  mark_decl_referenced (decl);
1033 	  if (vnode->finalized)
1034 	    cgraph_varpool_mark_needed_node (vnode);
1035 	}
1036       if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
1037 	{
1038 	  if (! TREE_PUBLIC (vnode->decl))
1039 	    warning (OPT_Wattributes,
1040 		     "%J%<externally_visible%> attribute has effect only on public objects",
1041 		     vnode->decl);
1042 	  else
1043 	    {
1044 	      if (vnode->finalized)
1045 		cgraph_varpool_mark_needed_node (vnode);
1046 	      vnode->externally_visible = true;
1047 	    }
1048 	}
1049     }
1050 }
1051 
1052 /* Analyze the whole compilation unit once it is parsed completely.  */
1053 
1054 void
1055 cgraph_finalize_compilation_unit (void)
1056 {
1057   struct cgraph_node *node, *next;
1058   /* Keep track of already processed nodes when called multiple times for
1059      intermodule optimization.  */
1060   static struct cgraph_node *first_analyzed;
1061   struct cgraph_node *first_processed = first_analyzed;
1062   static struct cgraph_varpool_node *first_analyzed_var;
1063 
1064   if (errorcount || sorrycount)
1065     return;
1066 
1067   finish_aliases_1 ();
1068 
1069   if (!flag_unit_at_a_time)
1070     {
1071       cgraph_output_pending_asms ();
1072       cgraph_assemble_pending_functions ();
1073       cgraph_varpool_output_debug_info ();
1074       return;
1075     }
1076 
1077   if (!quiet_flag)
1078     {
1079       fprintf (stderr, "\nAnalyzing compilation unit");
1080       fflush (stderr);
1081     }
1082 
1083   timevar_push (TV_CGRAPH);
1084   process_function_and_variable_attributes (first_processed,
1085 					    first_analyzed_var);
1086   first_processed = cgraph_nodes;
1087   first_analyzed_var = cgraph_varpool_nodes;
1088   cgraph_varpool_analyze_pending_decls ();
1089   if (cgraph_dump_file)
1090     {
1091       fprintf (cgraph_dump_file, "Initial entry points:");
1092       for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1093 	if (node->needed && DECL_SAVED_TREE (node->decl))
1094 	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1095       fprintf (cgraph_dump_file, "\n");
1096     }
1097 
1098   /* Propagate reachability flag and lower representation of all reachable
1099      functions.  In the future, lowering will introduce new functions and
1100      new entry points on the way (by template instantiation and virtual
1101      method table generation for instance).  */
1102   while (cgraph_nodes_queue)
1103     {
1104       struct cgraph_edge *edge;
1105       tree decl = cgraph_nodes_queue->decl;
1106 
1107       node = cgraph_nodes_queue;
1108       cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
1109       node->next_needed = NULL;
1110 
1111       /* ??? It is possible to create an extern inline function and later use
1112 	 the weak alias attribute to kill its body.  See
1113 	 gcc.c-torture/compile/20011119-1.c  */
1114       if (!DECL_SAVED_TREE (decl))
1115 	{
1116 	  cgraph_reset_node (node);
1117 	  continue;
1118 	}
1119 
1120       gcc_assert (!node->analyzed && node->reachable);
1121       gcc_assert (DECL_SAVED_TREE (decl));
1122 
1123       cgraph_analyze_function (node);
1124 
1125       for (edge = node->callees; edge; edge = edge->next_callee)
1126 	if (!edge->callee->reachable)
1127 	  cgraph_mark_reachable_node (edge->callee);
1128 
1129       /* We finalize local static variables while constructing the callgraph
1130          edges.  Process their attributes too.  */
1131       process_function_and_variable_attributes (first_processed,
1132 						first_analyzed_var);
1133       first_processed = cgraph_nodes;
1134       first_analyzed_var = cgraph_varpool_nodes;
1135       cgraph_varpool_analyze_pending_decls ();
1136     }
1137 
1138   /* Collect entry points to the unit.  */
1139   if (cgraph_dump_file)
1140     {
1141       fprintf (cgraph_dump_file, "Unit entry points:");
1142       for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1143 	if (node->needed && DECL_SAVED_TREE (node->decl))
1144 	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1145       fprintf (cgraph_dump_file, "\n\nInitial ");
1146       dump_cgraph (cgraph_dump_file);
1147     }
1148 
1149   if (cgraph_dump_file)
1150     fprintf (cgraph_dump_file, "\nReclaiming functions:");
1151 
1152   for (node = cgraph_nodes; node != first_analyzed; node = next)
1153     {
1154       tree decl = node->decl;
1155       next = node->next;
1156 
1157       if (node->local.finalized && !DECL_SAVED_TREE (decl))
1158 	cgraph_reset_node (node);
1159 
1160       if (!node->reachable && DECL_SAVED_TREE (decl))
1161 	{
1162 	  if (cgraph_dump_file)
1163 	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1164 	  cgraph_remove_node (node);
1165 	  continue;
1166 	}
1167       else
1168 	node->next_needed = NULL;
1169       gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1170       gcc_assert (node->analyzed == node->local.finalized);
1171     }
1172   if (cgraph_dump_file)
1173     {
1174       fprintf (cgraph_dump_file, "\n\nReclaimed ");
1175       dump_cgraph (cgraph_dump_file);
1176     }
1177   first_analyzed = cgraph_nodes;
1178   ggc_collect ();
1179   timevar_pop (TV_CGRAPH);
1180 }
1181 /* Figure out what functions we want to assemble.  */
1182 
1183 static void
1184 cgraph_mark_functions_to_output (void)
1185 {
1186   struct cgraph_node *node;
1187 
1188   for (node = cgraph_nodes; node; node = node->next)
1189     {
1190       tree decl = node->decl;
1191       struct cgraph_edge *e;
1192 
1193       gcc_assert (!node->output);
1194 
1195       for (e = node->callers; e; e = e->next_caller)
1196 	if (e->inline_failed)
1197 	  break;
1198 
1199       /* We need to output all local functions that are used and not
1200 	 always inlined, as well as those that are reachable from
1201 	 outside the current compilation unit.  */
1202       if (DECL_SAVED_TREE (decl)
1203 	  && !node->global.inlined_to
1204 	  && (node->needed
1205 	      || (e && node->reachable))
1206 	  && !TREE_ASM_WRITTEN (decl)
1207 	  && !DECL_EXTERNAL (decl))
1208 	node->output = 1;
1209       else
1210 	{
1211 	  /* We should've reclaimed all functions that are not needed.  */
1212 #ifdef ENABLE_CHECKING
1213 	  if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1214 	      && !DECL_EXTERNAL (decl))
1215 	    {
1216 	      dump_cgraph_node (stderr, node);
1217 	      internal_error ("failed to reclaim unneeded function");
1218 	    }
1219 #endif
1220 	  gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1221 		      || DECL_EXTERNAL (decl));
1222 
1223 	}
1224 
1225     }
1226 }
1227 
1228 /* Expand function specified by NODE.  */
1229 
1230 static void
1231 cgraph_expand_function (struct cgraph_node *node)
1232 {
1233   tree decl = node->decl;
1234 
1235   /* We ought to not compile any inline clones.  */
1236   gcc_assert (!node->global.inlined_to);
1237 
1238   if (flag_unit_at_a_time)
1239     announce_function (decl);
1240 
1241   cgraph_lower_function (node);
1242 
1243   /* Generate RTL for the body of DECL.  */
1244   lang_hooks.callgraph.expand_function (decl);
1245 
1246   /* Make sure that BE didn't give up on compiling.  */
1247   /* ??? Can happen with nested function of extern inline.  */
1248   gcc_assert (TREE_ASM_WRITTEN (node->decl));
1249 
1250   current_function_decl = NULL;
1251   if (!cgraph_preserve_function_body_p (node->decl))
1252     {
1253       DECL_SAVED_TREE (node->decl) = NULL;
1254       DECL_STRUCT_FUNCTION (node->decl) = NULL;
1255       DECL_INITIAL (node->decl) = error_mark_node;
1256       /* Eliminate all call edges.  This is important so the call_expr no longer
1257 	 points to the dead function body.  */
1258       cgraph_node_remove_callees (node);
1259     }
1260 
1261   cgraph_function_flags_ready = true;
1262 }
1263 
1264 /* Return true when the call on edge E is to be inlined; *REASON gets the recorded reason otherwise.  */
1265 
1266 bool
1267 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1268 {
1269   *reason = e->inline_failed;
1270   return !e->inline_failed;
1271 }
1272 
1273 
1274 
1275 /* Expand all functions that must be output.
1276 
1277    Attempt to topologically sort the nodes so that a function is output when
1278    all of its called functions are already assembled, to allow data to be
1279    propagated across the callgraph.  Use a stack to get smaller distance
1280    between a function and its callees (later we may choose to use a more
1281    sophisticated algorithm for function reordering; we will likely want
1282    to use subsections to make the output functions appear in top-down
1283    order).  */
1284 
1285 static void
1286 cgraph_expand_all_functions (void)
1287 {
1288   struct cgraph_node *node;
1289   struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1290   int order_pos = 0, new_order_pos = 0;
1291   int i;
1292 
1293   order_pos = cgraph_postorder (order);
1294   gcc_assert (order_pos == cgraph_n_nodes);
1295 
1296   /* The garbage collector may remove inline clones we eliminate during
1297      optimization, so we must be sure not to reference them.  */
1298   for (i = 0; i < order_pos; i++)
1299     if (order[i]->output)
1300       order[new_order_pos++] = order[i];
1301 
1302   for (i = new_order_pos - 1; i >= 0; i--)
1303     {
1304       node = order[i];
1305       if (node->output)
1306 	{
1307 	  gcc_assert (node->reachable);
1308 	  node->output = 0;
1309 	  cgraph_expand_function (node);
1310 	}
1311     }
1312 
1313   free (order);
1314 
1315   /* Process CGRAPH_EXPAND_QUEUE; these are functions created during
1316      the expansion process.  Note that this queue may grow as it is
1317      being processed, as the new functions may generate new ones.  */
1318   while (cgraph_expand_queue)
1319     {
1320       node = cgraph_expand_queue;
1321       cgraph_expand_queue = cgraph_expand_queue->next_needed;
1322       node->next_needed = NULL;
1323       node->output = 0;
1324       node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
1325       cgraph_expand_function (node);
1326     }
1327 }
1328 
1329 /* This is used to sort the node types by the cgraph order number.  */
1330 
1331 struct cgraph_order_sort
1332 {
1333   enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1334   union
1335   {
1336     struct cgraph_node *f;
1337     struct cgraph_varpool_node *v;
1338     struct cgraph_asm_node *a;
1339   } u;
1340 };
1341 
1342 /* Output all functions, variables, and asm statements in the order
1343    given by their order fields, which is the order in which they
1344    appeared in the file.  This implements -fno-toplevel-reorder.  In
1345    this mode we may output functions and variables which don't really
1346    need to be output.  */
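
/* For instance (an informal illustration of -fno-toplevel-reorder), a unit
   such as

	int counter = 0;
	asm ("# boundary marker");
	int bump (void) { return ++counter; }

   is emitted with the variable, the toplevel asm and the function in exactly
   this textual order, even when reordering them would otherwise be allowed.  */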
1347 
1348 static void
1349 cgraph_output_in_order (void)
1350 {
1351   int max;
1352   size_t size;
1353   struct cgraph_order_sort *nodes;
1354   int i;
1355   struct cgraph_node *pf;
1356   struct cgraph_varpool_node *pv;
1357   struct cgraph_asm_node *pa;
1358 
1359   max = cgraph_order;
1360   size = max * sizeof (struct cgraph_order_sort);
1361   nodes = (struct cgraph_order_sort *) alloca (size);
1362   memset (nodes, 0, size);
1363 
1364   cgraph_varpool_analyze_pending_decls ();
1365 
1366   for (pf = cgraph_nodes; pf; pf = pf->next)
1367     {
1368       if (pf->output)
1369 	{
1370 	  i = pf->order;
1371 	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1372 	  nodes[i].kind = ORDER_FUNCTION;
1373 	  nodes[i].u.f = pf;
1374 	}
1375     }
1376 
1377   for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
1378     {
1379       i = pv->order;
1380       gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1381       nodes[i].kind = ORDER_VAR;
1382       nodes[i].u.v = pv;
1383     }
1384 
1385   for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1386     {
1387       i = pa->order;
1388       gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1389       nodes[i].kind = ORDER_ASM;
1390       nodes[i].u.a = pa;
1391     }
1392 
1393   for (i = 0; i < max; ++i)
1394     {
1395       switch (nodes[i].kind)
1396 	{
1397 	case ORDER_FUNCTION:
1398 	  nodes[i].u.f->output = 0;
1399 	  cgraph_expand_function (nodes[i].u.f);
1400 	  break;
1401 
1402 	case ORDER_VAR:
1403 	  cgraph_varpool_assemble_decl (nodes[i].u.v);
1404 	  break;
1405 
1406 	case ORDER_ASM:
1407 	  assemble_asm (nodes[i].u.a->asm_str);
1408 	  break;
1409 
1410 	case ORDER_UNDEFINED:
1411 	  break;
1412 
1413 	default:
1414 	  gcc_unreachable ();
1415 	}
1416     }
1417 
1418   cgraph_asm_nodes = NULL;
1419 }
1420 
1421 /* Mark visibility of all functions.
1422 
1423    A local function is one whose calls can occur only in the current
1424    compilation unit and all its calls are explicit, so we can change
1425    its calling convention.  We simply mark all static functions whose
1426    address is not taken as local.
1427 
1428    We also change the TREE_PUBLIC flag of all declarations that are public
1429    from the language point of view but whose default we want to override,
1430    via visibilities, for the back end's point of view.  */
1431 
1432 static void
1433 cgraph_function_and_variable_visibility (void)
1434 {
1435   struct cgraph_node *node;
1436   struct cgraph_varpool_node *vnode;
1437 
1438   for (node = cgraph_nodes; node; node = node->next)
1439     {
1440       if (node->reachable
1441 	  && (DECL_COMDAT (node->decl)
1442 	      || (!flag_whole_program
1443 		  && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
1444 	node->local.externally_visible = true;
1445       if (!node->local.externally_visible && node->analyzed
1446 	  && !DECL_EXTERNAL (node->decl))
1447 	{
1448 	  gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1449 	  TREE_PUBLIC (node->decl) = 0;
1450 	}
1451       node->local.local = (!node->needed
1452 			   && node->analyzed
1453 			   && !DECL_EXTERNAL (node->decl)
1454 			   && !node->local.externally_visible);
1455     }
1456   for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1457     {
1458       if (vnode->needed
1459 	  && !flag_whole_program
1460 	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1461 	vnode->externally_visible = 1;
1462       if (!vnode->externally_visible)
1463 	{
1464 	  gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1465 	  TREE_PUBLIC (vnode->decl) = 0;
1466 	}
1467      gcc_assert (TREE_STATIC (vnode->decl));
1468     }
1469 
1470   /* Because we have to be conservative on the boundaries of source
1471      level units, it is possible that we marked some functions as
1472      reachable just because they might be used later via external
1473      linkage, but after making them local they are really unreachable
1474      now.  */
1475   cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1476 
1477   if (cgraph_dump_file)
1478     {
1479       fprintf (cgraph_dump_file, "\nMarking local functions:");
1480       for (node = cgraph_nodes; node; node = node->next)
1481 	if (node->local.local)
1482 	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1483       fprintf (cgraph_dump_file, "\n\n");
1484       fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1485       for (node = cgraph_nodes; node; node = node->next)
1486 	if (node->local.externally_visible)
1487 	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1488       fprintf (cgraph_dump_file, "\n\n");
1489     }
1490   cgraph_function_flags_ready = true;
1491 }
1492 
1493 /* Return true when the function body of DECL still needs to be kept around
1494    for later re-use.  */
1495 bool
1496 cgraph_preserve_function_body_p (tree decl)
1497 {
1498   struct cgraph_node *node;
1499   if (!cgraph_global_info_ready)
1500     return (flag_really_no_inline
1501 	    ? lang_hooks.tree_inlining.disregard_inline_limits (decl)
1502 	    : DECL_INLINE (decl));
1503   /* Look if there is any clone around.  */
1504   for (node = cgraph_node (decl); node; node = node->next_clone)
1505     if (node->global.inlined_to)
1506       return true;
1507   return false;
1508 }
1509 
1510 static void
1511 ipa_passes (void)
1512 {
1513   cfun = NULL;
1514   tree_register_cfg_hooks ();
1515   bitmap_obstack_initialize (NULL);
1516   execute_ipa_pass_list (all_ipa_passes);
1517   bitmap_obstack_release (NULL);
1518 }
1519 
1520 /* Perform simple optimizations based on callgraph.  */
1521 
1522 void
1523 cgraph_optimize (void)
1524 {
1525   if (errorcount || sorrycount)
1526     return;
1527 
1528 #ifdef ENABLE_CHECKING
1529   verify_cgraph ();
1530 #endif
1531   if (!flag_unit_at_a_time)
1532     {
1533       cgraph_output_pending_asms ();
1534       cgraph_varpool_assemble_pending_decls ();
1535       cgraph_varpool_output_debug_info ();
1536       return;
1537     }
1538 
1539   /* The front end may output common variables after the unit has been finalized.
1540      It is safe to deal with them here as they are always zero initialized.  */
1541   cgraph_varpool_analyze_pending_decls ();
1542 
1543   timevar_push (TV_CGRAPHOPT);
1544   if (!quiet_flag)
1545     fprintf (stderr, "Performing interprocedural optimizations\n");
1546 
1547   cgraph_function_and_variable_visibility ();
1548   if (cgraph_dump_file)
1549     {
1550       fprintf (cgraph_dump_file, "Marked ");
1551       dump_cgraph (cgraph_dump_file);
1552     }
1553 
1554   /* Don't run the IPA passes if there were any error or sorry messages.  */
1555   if (errorcount == 0 && sorrycount == 0)
1556     ipa_passes ();
1557 
1558   /* This pass removes the bodies of extern inline functions we never inlined.
1559      Do this later so other IPA passes see what is really going on.  */
1560   cgraph_remove_unreachable_nodes (false, dump_file);
1561   cgraph_increase_alignment ();
1562   cgraph_global_info_ready = true;
1563   if (cgraph_dump_file)
1564     {
1565       fprintf (cgraph_dump_file, "Optimized ");
1566       dump_cgraph (cgraph_dump_file);
1567       dump_varpool (cgraph_dump_file);
1568     }
1569   timevar_pop (TV_CGRAPHOPT);
1570 
1571   /* Output everything.  */
1572   if (!quiet_flag)
1573     fprintf (stderr, "Assembling functions:\n");
1574 #ifdef ENABLE_CHECKING
1575   verify_cgraph ();
1576 #endif
1577 
1578   cgraph_mark_functions_to_output ();
1579 
1580   if (!flag_toplevel_reorder)
1581     cgraph_output_in_order ();
1582   else
1583     {
1584       cgraph_output_pending_asms ();
1585 
1586       cgraph_expand_all_functions ();
1587       cgraph_varpool_remove_unreferenced_decls ();
1588 
1589       cgraph_varpool_assemble_pending_decls ();
1590       cgraph_varpool_output_debug_info ();
1591     }
1592 
1593   if (cgraph_dump_file)
1594     {
1595       fprintf (cgraph_dump_file, "\nFinal ");
1596       dump_cgraph (cgraph_dump_file);
1597     }
1598 #ifdef ENABLE_CHECKING
1599   verify_cgraph ();
1600   /* Double check that all inline clones are gone and that all
1601      function bodies have been released from memory.  */
1602   if (flag_unit_at_a_time
1603       && !(sorrycount || errorcount))
1604     {
1605       struct cgraph_node *node;
1606       bool error_found = false;
1607 
1608       for (node = cgraph_nodes; node; node = node->next)
1609 	if (node->analyzed
1610 	    && (node->global.inlined_to
1611 		|| DECL_SAVED_TREE (node->decl)))
1612 	  {
1613 	    error_found = true;
1614 	    dump_cgraph_node (stderr, node);
1615 	  }
1616       if (error_found)
1617 	internal_error ("nodes with no released memory found");
1618     }
1619 #endif
1620 }
1621 
1622 /* Increase alignment of global arrays to improve vectorization potential.
1623    TODO:
1624    - Consider also structs that have an array field.
1625    - Use ipa analysis to prune arrays that can't be vectorized?
1626      This should involve global alignment analysis and in the future also
1627      array padding.  */
1628 
1629 static void
1630 cgraph_increase_alignment (void)
1631 {
1632   if (flag_section_anchors && flag_tree_vectorize)
1633     {
1634       struct cgraph_varpool_node *vnode;
1635 
1636       /* Increase the alignment of all global arrays for vectorization.  */
1637       for (vnode = cgraph_varpool_nodes_queue;
1638            vnode;
1639            vnode = vnode->next_needed)
1640         {
1641           tree vectype, decl = vnode->decl;
1642           unsigned int alignment;
1643 
1644           if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
1645             continue;
1646           vectype = get_vectype_for_scalar_type (TREE_TYPE (TREE_TYPE (decl)));
1647           if (!vectype)
1648             continue;
1649           alignment = TYPE_ALIGN (vectype);
1650           if (DECL_ALIGN (decl) >= alignment)
1651             continue;
1652 
1653           if (vect_can_force_dr_alignment_p (decl, alignment))
1654             {
1655               DECL_ALIGN (decl) = TYPE_ALIGN (vectype);
1656               DECL_USER_ALIGN (decl) = 1;
1657               if (cgraph_dump_file)
1658                 {
1659                   fprintf (cgraph_dump_file, "Increasing alignment of decl: ");
1660                   print_generic_expr (cgraph_dump_file, decl, TDF_SLIM);
1661                 }
1662             }
1663         }
1664     }
1665 }
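
/* Illustrative example (editorial sketch, not code used by the pass): with
   -ftree-vectorize and section anchors in effect, a file-scope array such as

     static float data[256];

   may get its DECL_ALIGN raised to the alignment of the corresponding vector
   type (for instance 128 bits where a V4SF type is available), so that later
   loop vectorization over DATA can rely on aligned accesses.  */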
1666 
1667 /* Generate and emit a static constructor or destructor.  WHICH must be
1668    one of 'I' or 'D'.  BODY should be a STATEMENT_LIST containing
1669    GENERIC statements.  */
1670 
1671 void
1672 cgraph_build_static_cdtor (char which, tree body, int priority)
1673 {
1674   static int counter = 0;
1675   char which_buf[16];
1676   tree decl, name, resdecl;
1677 
1678   sprintf (which_buf, "%c_%d", which, counter++);
1679   name = get_file_function_name_long (which_buf);
1680 
1681   decl = build_decl (FUNCTION_DECL, name,
1682 		     build_function_type (void_type_node, void_list_node));
1683   current_function_decl = decl;
1684 
1685   resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1686   DECL_ARTIFICIAL (resdecl) = 1;
1687   DECL_IGNORED_P (resdecl) = 1;
1688   DECL_RESULT (decl) = resdecl;
1689 
1690   allocate_struct_function (decl);
1691 
1692   TREE_STATIC (decl) = 1;
1693   TREE_USED (decl) = 1;
1694   DECL_ARTIFICIAL (decl) = 1;
1695   DECL_IGNORED_P (decl) = 1;
1696   DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1697   DECL_SAVED_TREE (decl) = body;
1698   TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1699   DECL_UNINLINABLE (decl) = 1;
1700 
1701   DECL_INITIAL (decl) = make_node (BLOCK);
1702   TREE_USED (DECL_INITIAL (decl)) = 1;
1703 
1704   DECL_SOURCE_LOCATION (decl) = input_location;
1705   cfun->function_end_locus = input_location;
1706 
1707   switch (which)
1708     {
1709     case 'I':
1710       DECL_STATIC_CONSTRUCTOR (decl) = 1;
1711       break;
1712     case 'D':
1713       DECL_STATIC_DESTRUCTOR (decl) = 1;
1714       break;
1715     default:
1716       gcc_unreachable ();
1717     }
1718 
1719   gimplify_function_tree (decl);
1720 
1721   /* ??? We will get called LATE in the compilation process.  */
1722   if (cgraph_global_info_ready)
1723     {
1724       tree_lowering_passes (decl);
1725       tree_rest_of_compilation (decl);
1726     }
1727   else
1728     cgraph_finalize_function (decl, 0);
1729 
1730   if (targetm.have_ctors_dtors)
1731     {
1732       void (*fn) (rtx, int);
1733 
1734       if (which == 'I')
1735 	fn = targetm.asm_out.constructor;
1736       else
1737 	fn = targetm.asm_out.destructor;
1738       fn (XEXP (DECL_RTL (decl), 0), priority);
1739     }
1740 }
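
/* Usage sketch (hypothetical caller; MY_REGISTER_FNDECL is an invented
   FUNCTION_DECL used only for illustration): a front end that needs a
   file-scope initializer run at program startup could build a GENERIC
   body and hand it to cgraph_build_static_cdtor along the lines of

     tree body = alloc_stmt_list ();
     append_to_statement_list (build_function_call_expr (my_register_fndecl,
                                                         NULL_TREE),
                               &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   The generated function is then either registered through the target's
   constructor/destructor mechanism or made public so collect2 can find it.  */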
1741 
1742 void
1743 init_cgraph (void)
1744 {
1745   cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1746 }
1747 
1748 /* The edges representing the callers of the NEW_VERSION node were
1749    fixed by cgraph_function_versioning (); now the CALL_EXPR in each
1750    caller's call statement should be updated to call the NEW_VERSION.  */
1751 
1752 static void
1753 update_call_expr (struct cgraph_node *new_version)
1754 {
1755   struct cgraph_edge *e;
1756 
1757   gcc_assert (new_version);
1758   for (e = new_version->callers; e; e = e->next_caller)
1759     /* Update the call expr on the edges
1760        to call the new version.  */
1761     TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1762 }
1763 
1764 
1765 /* Create a new cgraph node which is the new version of
1766    OLD_VERSION node.  REDIRECT_CALLERS holds the callers
1767    edges which should be redirected to point to
1768    NEW_VERSION.  ALL the callees edges of OLD_VERSION
1769    are cloned to the new version node.  Return the new
1770    version node.  */
1771 
1772 static struct cgraph_node *
1773 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1774 				 tree new_decl,
1775 				 VEC(cgraph_edge_p,heap) *redirect_callers)
1776  {
1777    struct cgraph_node *new_version;
1778    struct cgraph_edge *e, *new_e;
1779    struct cgraph_edge *next_callee;
1780    unsigned i;
1781 
1782    gcc_assert (old_version);
1783 
1784    new_version = cgraph_node (new_decl);
1785 
1786    new_version->analyzed = true;
1787    new_version->local = old_version->local;
1788    new_version->global = old_version->global;
1789    new_version->rtl = old_version->rtl;
1790    new_version->reachable = true;
1791    new_version->count = old_version->count;
1792 
1793    /* Clone the old node's callees.  Recursive calls are
1794       also cloned.  */
1795    for (e = old_version->callees; e; e = e->next_callee)
1796      {
1797        new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1798        new_e->count = e->count;
1799      }
1800    /* Fix recursive calls.
1801       If OLD_VERSION has a recursive call, then after the edge
1802       cloning above the new version will have an edge pointing
1803       to the old version, which is wrong; redirect it to point
1804       to the new version.  */
1805    for (e = new_version->callees ; e; e = next_callee)
1806      {
1807        next_callee = e->next_callee;
1808        if (e->callee == old_version)
1809 	 cgraph_redirect_edge_callee (e, new_version);
1810 
1811        if (!next_callee)
1812 	 break;
1813      }
1814    for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1815      {
1816        /* Redirect calls to the old version node to point to its new
1817 	  version.  */
1818        cgraph_redirect_edge_callee (e, new_version);
1819      }
1820 
1821    return new_version;
1822  }
1823 
1824  /* Perform function versioning.
1825     Function versioning includes copying of the tree and
1826     a callgraph update (creating a new cgraph node and updating
1827     its callees and callers).
1828 
1829     REDIRECT_CALLERS is a vector of the caller edges to be redirected
1830     to the new version.
1831 
1832     TREE_MAP is a mapping of tree nodes we want to replace with
1833     new ones (according to results of prior analysis).
1834     OLD_VERSION_NODE is the node that is versioned.
1835     It returns the new version's cgraph node.  */
1836 
1837 struct cgraph_node *
1838 cgraph_function_versioning (struct cgraph_node *old_version_node,
1839 			    VEC(cgraph_edge_p,heap) *redirect_callers,
1840 			    varray_type tree_map)
1841 {
1842   tree old_decl = old_version_node->decl;
1843   struct cgraph_node *new_version_node = NULL;
1844   tree new_decl;
1845 
1846   if (!tree_versionable_function_p (old_decl))
1847     return NULL;
1848 
1849   /* Make a new FUNCTION_DECL tree node for the
1850      new version. */
1851   new_decl = copy_node (old_decl);
1852 
1853   /* Create the new version's call-graph node
1854      and update the edges of the new node.  */
1855   new_version_node =
1856     cgraph_copy_node_for_versioning (old_version_node, new_decl,
1857 				     redirect_callers);
1858 
1859   /* Copy the OLD_VERSION_NODE function tree to the new version.  */
1860   tree_function_versioning (old_decl, new_decl, tree_map, false);
1861   /* Update the call_expr on the edges to call the new version node. */
1862   update_call_expr (new_version_node);
1863 
1864   /* Update the new version's properties.
1865      Make the new version visible only within this translation unit.
1866      ??? We cannot use COMDAT linkage because there is no
1867      ABI support for this.  */
1868   DECL_EXTERNAL (new_version_node->decl) = 0;
1869   DECL_ONE_ONLY (new_version_node->decl) = 0;
1870   TREE_PUBLIC (new_version_node->decl) = 0;
1871   DECL_COMDAT (new_version_node->decl) = 0;
1872   new_version_node->local.externally_visible = 0;
1873   new_version_node->local.local = 1;
1874   new_version_node->lowered = true;
1875   return new_version_node;
1876 }
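
/* Usage sketch (hypothetical IPA pass, not code in this file): to obtain a
   private, unit-local copy of NODE with no tree replacements, a pass could
   collect the caller edges it wants retargeted and then call this function:

     VEC(cgraph_edge_p,heap) *redirect = NULL;
     struct cgraph_edge *e;
     struct cgraph_node *clone;

     for (e = node->callers; e; e = e->next_caller)
       VEC_safe_push (cgraph_edge_p, heap, redirect, e);
     clone = cgraph_function_versioning (node, redirect, NULL);

   Since the clone is local and not externally visible, the pass is free to
   rewrite its body or calling convention without affecting other units.  */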
1877 
1878 /* Produce a separate function body for inline clones so the offline copy can be
1879    modified without affecting them.  */
1880 struct cgraph_node *
1881 save_inline_function_body (struct cgraph_node *node)
1882 {
1883   struct cgraph_node *first_clone;
1884 
1885   gcc_assert (node == cgraph_node (node->decl));
1886 
1887   cgraph_lower_function (node);
1888 
1889   /* In non-unit-at-a-time mode we construct a full-fledged clone that we never
1890      output to the assembly file.  This clone is pointed to by inline_decl of the
1891      original function, and the inlining infrastructure knows how to deal with it.  */
1892   if (!flag_unit_at_a_time)
1893     {
1894       struct cgraph_edge *e;
1895 
1896       first_clone = cgraph_clone_node (node, node->count, 0, false);
1897       first_clone->needed = 0;
1898       first_clone->reachable = 1;
1899       /* Recursively clone all bodies.  */
1900       for (e = first_clone->callees; e; e = e->next_callee)
1901 	if (!e->inline_failed)
1902 	  cgraph_clone_inlined_nodes (e, true, false);
1903     }
1904   else
1905     first_clone = node->next_clone;
1906 
1907   first_clone->decl = copy_node (node->decl);
1908   node->next_clone = NULL;
1909   if (!flag_unit_at_a_time)
1910     node->inline_decl = first_clone->decl;
1911   first_clone->prev_clone = NULL;
1912   cgraph_insert_node_to_hashtable (first_clone);
1913   gcc_assert (first_clone == cgraph_node (first_clone->decl));
1914 
1915   /* Copy the function tree of NODE to FIRST_CLONE's declaration.  */
1916   tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1917 
1918   DECL_EXTERNAL (first_clone->decl) = 0;
1919   DECL_ONE_ONLY (first_clone->decl) = 0;
1920   TREE_PUBLIC (first_clone->decl) = 0;
1921   DECL_COMDAT (first_clone->decl) = 0;
1922 
1923   for (node = first_clone->next_clone; node; node = node->next_clone)
1924     node->decl = first_clone->decl;
1925 #ifdef ENABLE_CHECKING
1926   verify_cgraph_node (first_clone);
1927 #endif
1928   return first_clone;
1929 }
1930 
1931 #include "gt-cgraphunit.h"
1932