/* Driver of optimization process
   Copyright (C) 2003-2018 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module implements the main driver of the compilation process.

   The main scope of this file is to act as an interface between
   tree-based frontends and the backend.

   The front-end is supposed to use the following functionality:

    - finalize_function

      This function is called once the front-end has parsed the whole body
      of a function and it is certain that neither the function body nor the
      declaration will change.

      (There is one exception, needed for implementing the GCC extern
      inline extension.)

    - varpool_finalize_decl

      This function has the same behavior as the above but is used for
      static variables.

    - add_asm_node

      Insert a new toplevel ASM statement.

    - finalize_compilation_unit

      This function is called once the (source level) compilation unit is
      finalized and it will no longer change.

      The symbol table is constructed starting from the trivially needed
      symbols finalized by the frontend.  Functions are lowered into
      GIMPLE representation and callgraph/reference lists are constructed.
      Those are used to discover other necessary functions and variables.

      At the end the bodies of unreachable functions are removed.

      The function can be called multiple times when multiple source level
      compilation units are combined.

    - compile

      This passes control to the back-end.  Optimizations are performed and
      final assembler is generated.  This is done in the following way.  Note
      that with link time optimization the process is split into three
      stages (compile time, linktime analysis and parallel linktime as
      indicated below).

      Compile time:

	1) Inter-procedural optimization.
	   (ipa_passes)

	   This part is further split into:

	   a) early optimizations.  These are local passes executed in
	      the topological order on the callgraph.

	      The purpose of early optimizations is to optimize away simple
	      things that may otherwise confuse IP analysis.  Very simple
	      propagation across the callgraph is done, i.e. to discover
	      functions without side effects, and simple inlining is
	      performed.

	   b) early small interprocedural passes.

	      Those are interprocedural passes executed only at compilation
	      time.  These include, for example, transactional memory
	      lowering, unreachable code removal and other simple
	      transformations.

	   c) IP analysis stage.  All interprocedural passes do their
	      analysis.

	      Interprocedural passes differ from small interprocedural
	      passes by their ability to operate across the whole program
	      at linktime.  Their analysis stage is performed early to
	      both reduce linking times and linktime memory usage by
	      not having to represent the whole program in memory.

	   d) LTO streaming.  When doing LTO, everything important gets
	      streamed into the object file.

      Compile time and/or linktime analysis stage (WPA):

	      At linktime units get streamed back and the symbol table is
	      merged.  Function bodies are not streamed in and not
	      available.
	   e) IP propagation stage.  All IP passes execute their
	      IP propagation.  This is done based on the earlier analysis
	      without having function bodies at hand.
	   f) Ltrans streaming.  When doing WHOPR LTO, the program
	      is partitioned and streamed into multiple object files.

      Compile time and/or parallel linktime stage (ltrans):

	      Each of the object files is streamed back and compiled
	      separately.  Now the function bodies become available
	      again.

	2) Virtual clone materialization
	   (cgraph_materialize_clone)

	   IP passes can produce copies of existing functions (such
	   as versioned clones or inline clones) without actually
	   manipulating their bodies by creating virtual clones in
	   the callgraph.  At this time the virtual clones are
	   turned into real functions.
	3) IP transformation

	   All IP passes transform function bodies based on earlier
	   decisions of the IP propagation.

	4) late small IP passes

	   Simple IP passes working within a single program partition.

	5) Expansion
	   (expand_all_functions)

	   At this stage functions that need to be output into
	   assembler are identified and compiled in topological order.
	6) Output of variables and aliases
	   Now it is known which variable references were not optimized
	   out and thus all variables are output to the file.

	   Note that with -fno-toplevel-reorder passes 5 and 6
	   are combined together in cgraph_output_in_order.

   Finally there are functions to manipulate the callgraph from
   the backend.
    - cgraph_add_new_function is used to add backend produced
      functions introduced after the unit is finalized.
      The functions are enqueued for later processing and inserted
      into the callgraph with cgraph_process_new_functions.

    - cgraph_function_versioning

      produces a copy of a function into a new one (a version)
      and applies simple transformations.  */
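
/* For orientation, a minimal sketch (illustrative only; the glue code and
   the way DECLs are obtained are hypothetical) of how a front end drives
   the entry points described above:

     tree fndecl = ...;			    // fully parsed function
     cgraph_node::finalize_function (fndecl, false);

     tree vardecl = ...;		    // finalized static variable
     varpool_node::finalize_decl (vardecl);

     symtab->finalize_compilation_unit (); // analyze and compile the unit

   Real front ends go through langhooks and may combine several source
   level compilation units before the final compilation starts.  */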

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "regset.h"     /* FIXME: For reg_obstack.  */
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "coverage.h"
#include "lto-streamer.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "output.h"
#include "cfgcleanup.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "langhooks.h"
#include "toplev.h"
#include "debug.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "gimple-pretty-print.h"
#include "plugin.h"
#include "ipa-fnsummary.h"
#include "ipa-utils.h"
#include "except.h"
#include "cfgloop.h"
#include "context.h"
#include "pass_manager.h"
#include "tree-nested.h"
#include "dbgcnt.h"
#include "tree-chkp.h"
#include "lto-section-names.h"
#include "stringpool.h"
#include "attribs.h"

/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
vec<cgraph_node *> cgraph_new_nodes;

static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void handle_alias_pairs (void);

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;

/* Return true if this symbol is a function from the C frontend specified
   directly in RTL form (with "__RTL").  */

bool
symtab_node::native_rtl_p () const
{
  if (TREE_CODE (decl) != FUNCTION_DECL)
    return false;
  if (!DECL_STRUCT_FUNCTION (decl))
    return false;
  return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
}

/* Determine if a symbol declaration is needed.  That is, it is visible to
   something either outside this translation unit or magic in the system
   configury.  */
bool
symtab_node::needed_p (void)
{
  /* Double check that no one output the function into an assembly file
     early.  */
  if (!native_rtl_p ())
    gcc_checking_assert
      (!DECL_ASSEMBLER_NAME_SET_P (decl)
       || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));

  if (!definition)
    return false;

  if (DECL_EXTERNAL (decl))
    return false;

  /* If the user told us it is used, then it must be so.  */
  if (force_output)
    return true;

  /* ABI forced symbols are needed when they are external.  */
  if (forced_by_abi && TREE_PUBLIC (decl))
    return true;

  /* Keep constructors, destructors and virtual functions.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
    return true;

  /* Externally visible variables must be output.  The exception is
     COMDAT variables that must be output only when they are needed.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  return false;
}

/* Head and terminator of the queue of nodes to be processed while building
   callgraph.  */

static symtab_node symtab_terminator;
static symtab_node *queued_nodes = &symtab_terminator;

/* Add NODE to queue starting at QUEUED_NODES.
   The queue is linked via AUX pointers and terminated by a pointer
   to SYMTAB_TERMINATOR.  */

static void
enqueue_node (symtab_node *node)
{
  if (node->aux)
    return;
  gcc_checking_assert (queued_nodes);
  node->aux = queued_nodes;
  queued_nodes = node;
}
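
/* A sketch of how this intrusive queue is drained (this mirrors the loop
   in analyze_functions below; shown here only to illustrate the AUX
   linking):

     while (queued_nodes != &symtab_terminator)
       {
	 symtab_node *node = queued_nodes;
	 queued_nodes = (symtab_node *) queued_nodes->aux;
	 // ... process NODE, possibly calling enqueue_node again ...
       }

   NODE->aux doubling as the link field is also why enqueue_node refuses
   nodes with non-NULL aux: they are already queued.  */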

/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize the function and
	     move it into the reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization has already started, do all essential
	     transformations that have already been performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    {
	      bool summaried_computed = ipa_fn_summaries != NULL;
	      g->get_passes ()->execute_early_local_passes ();
	      /* Early passes compute inline parameters to do inlining
		 and splitting.  This is redundant for functions added late.
		 Just throw away whatever was computed.  */
	      if (!summaried_computed)
		ipa_free_fn_summary ();
	    }
	  else if (ipa_fn_summaries != NULL)
	    compute_fn_summary (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  cgraph_new_nodes.release ();
}

/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ are not well defined.
   We replace the old body with the new body so in unit-at-a-time mode
   we always use the new body, while in normal mode we may end up with
   the old body inlined into some functions and the new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function but this is difficult to do.  */

void
cgraph_node::reset (void)
{
  /* If process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function
     redefinition errors.  I would argue that the front end should never
     present us with such a case, but don't enforce that for now.  */
  gcc_assert (!process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&local, 0, sizeof (local));
  memset (&global, 0, sizeof (global));
  memset (&rtl, 0, sizeof (rtl));
  analyzed = false;
  definition = false;
  alias = false;
  transparent_alias = false;
  weakref = false;
  cpp_implicit_alias = false;

  remove_callees ();
  remove_all_references ();
}
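
/* For example (illustrative user code; this is the GNU extern inline
   extension the comment above refers to):

     extern inline int f (void) { return 1; }  // body used for inlining only
     int f (void) { return 2; }		       // redefinition

   When the second definition is finalized, the node for F is reset and
   re-analyzed with the new body.  */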

/* Return true when there are references to the node.  INCLUDE_SELF is
   true if a self reference counts as a reference.  */

bool
symtab_node::referred_to_p (bool include_self)
{
  ipa_ref *ref = NULL;

  /* See if there are any references at all.  */
  if (iterate_referring (0, ref))
    return true;
  /* For functions check also calls.  */
  cgraph_node *cn = dyn_cast <cgraph_node *> (this);
  if (cn && cn->callers)
    {
      if (include_self)
	return true;
      for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
	if (e->caller != this)
	  return true;
    }
  return false;
}

/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand
   to have the garbage collector run at the moment.  We would need to
   either create a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      node->reset ();
      node->local.redefined_extern_inline = true;
    }

  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  if (!flag_toplevel_reorder)
    node->no_reorder = true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* __RTL functions were already output as soon as they were parsed (due
     to the large amount of global state in the backend).
     Mark such functions as "force_output" to reflect the fact that they
     will be in the asm file when considering the symbols they reference.
     The attempt to output them later on will bail out immediately.  */
  if (node->native_rtl_p ())
    node->force_output = 1;

  /* When not optimizing, also output the static functions (see PR24561),
     but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
	|| node->no_reorder)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  if (!no_collect)
    ggc_collect ();

  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}

/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by the middle end and allows insertion of new functions at an arbitrary
   point of compilation.  The function can be either in high, low or SSA
   form GIMPLE.

   The function is assumed to be reachable and have its address taken (so no
   API breaking optimizations are performed on it).

   The main work done by this function is to enqueue the function for later
   processing to avoid the need for the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;

  if (dump_file)
    {
      struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
      const char *function_type = ((gimple_has_body_p (fndecl))
				   ? (lowered
				      ? (gimple_in_ssa_p (fn)
					 ? "ssa gimple"
					 : "low gimple")
				      : "high gimple")
				   : "to-be-gimplified");
      fprintf (dump_file,
	       "Added new %s function %s to callgraph\n",
	       function_type,
	       fndecl_name (fndecl));
    }

  switch (symtab->state)
    {
    case PARSING:
      cgraph_node::finalize_function (fndecl, false);
      break;
    case CONSTRUCTION:
      /* Just enqueue the function to be processed at the nearest
	 occurrence.  */
      node = cgraph_node::get_create (fndecl);
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case IPA:
    case IPA_SSA:
    case IPA_SSA_AFTER_INLINING:
    case EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_node::get_create (fndecl);
      node->local.local = false;
      node->definition = true;
      node->force_output = true;
      if (TREE_PUBLIC (fndecl))
	node->externally_visible = true;
      if (!lowered && symtab->state == EXPANSION)
	{
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_node::create (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      node->analyze ();
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      node->expand ();
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}

/* Analyze the function scheduled to be output.  */
void
cgraph_node::analyze (void)
{
  if (native_rtl_p ())
    {
      analyzed = true;
      return;
    }

  tree decl = this->decl;
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (thunk.thunk_p)
    {
      cgraph_node *t = cgraph_node::get (thunk.alias);

      create_edge (t, NULL, t->count);
      callees->can_throw_external = !TREE_NOTHROW (t->decl);
      /* Target code in expand_thunk may need the thunk's target
	 to be analyzed, so recurse here.  */
      if (!t->analyzed)
	t->analyze ();
      if (t->alias)
	{
	  t = t->get_alias_target ();
	  if (!t->analyzed)
	    t->analyze ();
	}
      if (!expand_thunk (false, false))
	{
	  thunk.alias = NULL;
	  return;
	}
      thunk.alias = NULL;
    }
  if (alias)
    resolve_alias (cgraph_node::get (alias_target), transparent_alias);
  else if (dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      cgraph_function_version_info *dispatcher_version_info
	= function_version ();
      if (dispatcher_version_info != NULL
	  && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (this);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_needed (decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);

      /* Lower the function.  */
      if (!lowered)
	{
	  if (nested)
	    lower_nested_functions (decl);
	  gcc_assert (!nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  lowered = true;
	}

      pop_cfun ();
    }
  analyzed = true;

  input_location = saved_loc;
}

/* The C++ frontend produces same-body aliases all over the place, even
   before PCH gets streamed out.  It relies on us linking the aliases with
   their function in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, but once the C++ FE
   is sure it won't stream PCH we build the links via this function.  */

void
symbol_table::process_same_body_aliases (void)
{
  symtab_node *node;
  FOR_EACH_SYMBOL (node)
    if (node->cpp_implicit_alias && !node->analyzed)
      node->resolve_alias
	(VAR_P (node->alias_target)
	 ? (symtab_node *)varpool_node::get_create (node->alias_target)
	 : (symtab_node *)cgraph_node::get_create (node->alias_target));
  cpp_implicit_aliases_done = true;
}

/* Process attributes common for vars and functions.  */

static void
process_common_attributes (symtab_node *node, tree decl)
{
  tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));

  if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
    {
      warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		  "%<weakref%> attribute should be accompanied with"
		  " an %<alias%> attribute");
      DECL_WEAK (decl) = 0;
      DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						 DECL_ATTRIBUTES (decl));
    }

  if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
    node->no_reorder = 1;
}

/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* Redefining an extern inline function makes it
	     DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (node, decl);
    }
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}

/* Mark DECL as finalized.  By finalizing the declaration, the frontend
   instructs the middle end to output the variable to the asm file, if
   needed or externally visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  if (node->definition)
    return;
  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  if (!flag_toplevel_reorder)
    node->no_reorder = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || (node->no_reorder && !DECL_COMDAT (node->decl)
	  && !DECL_ARTIFICIAL (node->decl)))
    node->force_output = true;

  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     has finished.  */
  if (symtab->state == FINISHED
      || (node->no_reorder
	  && symtab->state == EXPANSION))
    node->assemble_decl ();

  if (DECL_INITIAL (decl))
    chkp_register_var_initializer (decl);
}

/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *> targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of the virtual table defining it, or it
	     is unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		   (TREE_TYPE (targets[i]->decl))
		   == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		    (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
	  if (!targets.length ())
	    gimple_call_set_with_bounds (edge->call_stmt, false);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
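
/* For instance (illustrative C++ source, not part of this file): with

     struct S final { virtual int f () { return 0; } };
     int g (S *s) { return s->f (); }

   the type of *S is final, so S::f is the only possible target and the
   indirect call in G can be made direct.  When no possible targets remain
   at all, the call is redirected to __builtin_unreachable instead.  */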

/* Issue appropriate warnings for the global declaration DECL.  */

static void
check_global_declaration (symtab_node *snode)
{
  const char *decl_file;
  tree decl = snode->decl;

  /* Warn about any function declared static but not defined.  We don't
     warn about variables, because many programs have static variables
     that exist only to get some text into the object file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && DECL_INITIAL (decl) == 0
      && DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! TREE_NO_WARNING (decl)
      && ! TREE_PUBLIC (decl)
      && (warn_unused_function
	  || snode->referred_to_p (/*include_self=*/false)))
    {
      if (snode->referred_to_p (/*include_self=*/false))
	pedwarn (input_location, 0, "%q+F used but never defined", decl);
      else
	warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
      /* This symbol is effectively an "extern" declaration now.  */
      TREE_PUBLIC (decl) = 1;
    }

  /* Warn about static fns or vars defined but not used.  */
  if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
       || (((warn_unused_variable && ! TREE_READONLY (decl))
	    || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
		&& (warn_unused_const_variable == 2
		    || (main_input_filename != NULL
			&& (decl_file = DECL_SOURCE_FILE (decl)) != NULL
			&& filename_cmp (main_input_filename,
					 decl_file) == 0))))
	   && VAR_P (decl)))
      && ! DECL_IN_SYSTEM_HEADER (decl)
      && ! snode->referred_to_p (/*include_self=*/false)
      /* This TREE_USED check is needed in addition to referred_to_p
	 above, because the `__unused__' attribute is not being
	 considered for referred_to_p.  */
      && ! TREE_USED (decl)
      /* The TREE_USED bit for file-scope decls is kept in the identifier,
	 to handle multiple external decls in different scopes.  */
      && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
      && ! DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! DECL_ABSTRACT_ORIGIN (decl)
      && ! TREE_PUBLIC (decl)
      /* A volatile variable might be used in some non-obvious way.  */
      && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
      /* Global register variables must be declared to reserve them.  */
      && ! (VAR_P (decl) && DECL_REGISTER (decl))
      /* Global ctors and dtors are called by the runtime.  */
      && (TREE_CODE (decl) != FUNCTION_DECL
	  || (!DECL_STATIC_CONSTRUCTOR (decl)
	      && !DECL_STATIC_DESTRUCTOR (decl)))
      /* Otherwise, ask the language.  */
      && lang_hooks.decls.warn_unused_global (decl))
    warning_at (DECL_SOURCE_LOCATION (decl),
		(TREE_CODE (decl) == FUNCTION_DECL)
		? OPT_Wunused_function
		: (TREE_READONLY (decl)
		   ? OPT_Wunused_const_variable_
		   : OPT_Wunused_variable),
		"%qD defined but not used", decl);
}

/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred to by them.  */
static cgraph_node *first_analyzed;
static varpool_node *first_analyzed_var;

/* FIRST_TIME is set to TRUE for the first time we are called for a
   translation unit from finalize_compilation_unit() or false
   otherwise.  */

static void
analyze_functions (bool first_time)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  cgraph_node *first_handled = first_analyzed;
  varpool_node *first_handled_var = first_analyzed_var;
  hash_set<void *> reachable_call_targets;

  symtab_node *node;
  symtab_node *next;
  int i;
  ipa_ref *ref;
  bool changed = true;
  location_t saved_loc = input_location;

  bitmap_obstack_initialize (NULL);
  symtab->state = CONSTRUCTION;
  input_location = UNKNOWN_LOCATION;

  /* Ugly, but the fixup cannot happen at the time the same body alias is
     created; the C++ FE is confused about the COMDAT groups being right.  */
  if (symtab->cpp_implicit_aliases_done)
    FOR_EACH_SYMBOL (node)
      if (node->cpp_implicit_alias)
	node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
  build_type_inheritance_graph ();

  /* Analysis adds static variables that in turn add references to new
     functions, so we need to iterate the process until it stabilizes.  */
  while (changed)
    {
      changed = false;
      process_function_and_variable_attributes (first_analyzed,
						first_analyzed_var);

      /* First identify the trivially needed symbols.  */
      for (node = symtab->first_symbol ();
	   node != first_analyzed
	   && node != first_analyzed_var; node = node->next)
	{
	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
	  node->get_comdat_group_id ();
	  if (node->needed_p ())
	    {
	      enqueue_node (node);
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "Trivially needed symbols:");
	      changed = true;
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, " %s", node->asm_name ());
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "\n");
	    }
	  if (node == first_analyzed
	      || node == first_analyzed_var)
	    break;
	}
      symtab->process_new_functions ();
      first_analyzed_var = symtab->first_variable ();
      first_analyzed = symtab->first_function ();

      if (changed && symtab->dump_file)
	fprintf (symtab->dump_file, "\n");

      /* Lower representation, build callgraph edges and references for all
	 trivially needed symbols and all symbols referred to by them.  */
      while (queued_nodes != &symtab_terminator)
	{
	  changed = true;
	  node = queued_nodes;
	  queued_nodes = (symtab_node *)queued_nodes->aux;
	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
	  if (cnode && cnode->definition)
	    {
	      cgraph_edge *edge;
	      tree decl = cnode->decl;

	      /* ??? It is possible to create an extern inline function
		 and later use the weak alias attribute to kill its body.
		 See gcc.c-torture/compile/20011119-1.c  */
	      if (!DECL_STRUCT_FUNCTION (decl)
		  && !cnode->alias
		  && !cnode->thunk.thunk_p
		  && !cnode->dispatcher_function)
		{
		  cnode->reset ();
		  cnode->local.redefined_extern_inline = true;
		  continue;
		}

	      if (!cnode->analyzed)
		cnode->analyze ();

	      for (edge = cnode->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition
		    && (!DECL_EXTERNAL (edge->callee->decl)
			/* When not optimizing, do not try to analyze extern
			   inline functions.  Doing so is pointless.  */
			|| opt_for_fn (edge->callee->decl, optimize)
			/* Weakrefs need to be preserved.  */
			|| edge->callee->alias
			/* always_inline functions are inlined even at
			   -O0.  */
			|| lookup_attribute
				("always_inline",
				 DECL_ATTRIBUTES (edge->callee->decl))
			/* Multiversioned functions need the dispatcher to
			   be produced locally even for extern functions.  */
			|| edge->callee->function_version ()))
		  enqueue_node (edge->callee);
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  cgraph_edge *next;

		  for (edge = cnode->indirect_calls; edge; edge = next)
		    {
		      next = edge->next_callee;
		      if (edge->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       edge);
		    }
		}

	      /* If decl is a clone of an abstract function,
		 mark that abstract function so that we don't release its
		 body.  The DECL_INITIAL() of that abstract function
		 declaration will be later needed to output debug info.  */
	      if (DECL_ABSTRACT_ORIGIN (decl))
		{
		  cgraph_node *origin_node
		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
		  origin_node->used_as_abstract_origin = true;
		}
	      /* Preserve a function's function context node.  It will
		 later be needed to output debug info.  */
	      if (tree fn = decl_function_context (decl))
		{
		  cgraph_node *origin_node = cgraph_node::get_create (fn);
		  enqueue_node (origin_node);
		}
	    }
	  else
	    {
	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
	      if (vnode && vnode->definition && !vnode->analyzed)
		vnode->analyze ();
	    }

	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ())
		  enqueue_node (next);
	    }
	  for (i = 0; node->iterate_reference (i, ref); i++)
	    if (ref->referred->definition
		&& (!DECL_EXTERNAL (ref->referred->decl)
		    || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
			    && opt_for_fn (ref->referred->decl, optimize))
			|| node->alias
			|| ref->referred->alias)))
	      enqueue_node (ref->referred);
	  symtab->process_new_functions ();
	}
    }
  update_type_inheritance_graph ();

  /* Collect entry points to the unit.  */
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nInitial ");
      symtab->dump (symtab->dump_file);
    }

  if (first_time)
    {
      symtab_node *snode;
      FOR_EACH_SYMBOL (snode)
	check_global_declaration (snode);
    }

  if (symtab->dump_file)
    fprintf (symtab->dump_file, "\nRemoving unused symbols:");

  for (node = symtab->first_symbol ();
       node != first_handled
       && node != first_handled_var; node = next)
    {
      next = node->next;
      if (!node->aux && !node->referred_to_p ())
	{
	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, " %s", node->name ());

	  /* See if the debugger can use anything before the DECL
	     passes away.  Perhaps it can notice a DECL that is now a
	     constant and can tag the early DIE with an appropriate
	     attribute.

	     Otherwise, this is the last chance the debug_hooks have
	     at looking at optimized away DECLs, since
	     late_global_decl will subsequently be called from the
	     contents of the now pruned symbol table.  */
	  if (VAR_P (node->decl)
	      && !decl_function_context (node->decl))
	    {
	      /* We are reclaiming totally unreachable code and variables
		 so they effectively appear as readonly.  Show that to
		 the debug machinery.  */
	      TREE_READONLY (node->decl) = 1;
	      node->definition = false;
	      (*debug_hooks->late_global_decl) (node->decl);
	    }

	  node->remove ();
	  continue;
	}
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  tree decl = node->decl;

	  if (cnode->definition && !gimple_has_body_p (decl)
	      && !cnode->alias
	      && !cnode->thunk.thunk_p)
	    cnode->reset ();

	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
		      || cnode->alias
		      || gimple_has_body_p (decl)
		      || cnode->native_rtl_p ());
	  gcc_assert (cnode->analyzed == cnode->definition);
	}
      node->aux = NULL;
    }
  for (; node; node = node->next)
    node->aux = NULL;
  first_analyzed = symtab->first_function ();
  first_analyzed_var = symtab->first_variable ();
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nReclaimed ");
      symtab->dump (symtab->dump_file);
    }
  bitmap_obstack_release (NULL);
  ggc_collect ();
  /* Initialize the assembler name hash, in particular we want to trigger
     C++ mangling and same body alias creation before we free DECL_ARGUMENTS
     used by it.  */
  if (!seen_error ())
    symtab->symtab_initialize_asm_name_hash ();

  input_location = saved_loc;
}

/* Check declaration of the type of ALIAS for compatibility with its TARGET
   (which may be an ifunc resolver) and issue a diagnostic when they are
   not compatible according to language rules (plus a C++ extension for
   non-static member functions).  */

static void
maybe_diag_incompatible_alias (tree alias, tree target)
{
  tree altype = TREE_TYPE (alias);
  tree targtype = TREE_TYPE (target);

  bool ifunc = cgraph_node::get (alias)->ifunc_resolver;
  tree funcptr = altype;

  if (ifunc)
    {
      /* Handle attribute ifunc first.  */
      if (TREE_CODE (altype) == METHOD_TYPE)
	{
	  /* Set FUNCPTR to the type of the alias target.  If the type
	     is a non-static member function of class C, construct a type
	     of an ordinary function taking C* as the first argument,
	     followed by the member function argument list, and use it
	     instead to check for incompatibility.  This conversion is
	     not defined by the language but an extension provided by
	     G++.  */

	  tree rettype = TREE_TYPE (altype);
	  tree args = TYPE_ARG_TYPES (altype);
	  altype = build_function_type (rettype, args);
	  funcptr = altype;
	}

      targtype = TREE_TYPE (targtype);

      if (POINTER_TYPE_P (targtype))
	{
	  targtype = TREE_TYPE (targtype);

	  /* Only issue Wattribute-alias for conversions to void* with
	     -Wextra.  */
	  if (VOID_TYPE_P (targtype) && !extra_warnings)
	    return;

	  /* Proceed to handle incompatible ifunc resolvers below.  */
	}
      else
	{
	  funcptr = build_pointer_type (funcptr);

	  error_at (DECL_SOURCE_LOCATION (target),
		    "%<ifunc%> resolver for %qD must return %qT",
		    alias, funcptr);
	  inform (DECL_SOURCE_LOCATION (alias),
		  "resolver indirect function declared here");
	  return;
	}
    }

  if ((!FUNC_OR_METHOD_TYPE_P (targtype)
       || (prototype_p (altype)
	   && prototype_p (targtype)
	   && !types_compatible_p (altype, targtype))))
    {
      /* Warn for incompatibilities.  Avoid warning for functions
	 without a prototype to make it possible to declare aliases
	 without knowing the exact type, as libstdc++ does.  */
      if (ifunc)
	{
	  funcptr = build_pointer_type (funcptr);

	  if (warning_at (DECL_SOURCE_LOCATION (target),
			  OPT_Wattribute_alias,
			  "%<ifunc%> resolver for %qD should return %qT",
			  alias, funcptr))
	    inform (DECL_SOURCE_LOCATION (alias),
		    "resolver indirect function declared here");
	}
      else if (warning_at (DECL_SOURCE_LOCATION (alias),
			   OPT_Wattribute_alias,
			   "%qD alias between functions of incompatible "
			   "types %qT and %qT", alias, altype, targtype))
	inform (DECL_SOURCE_LOCATION (target),
		"aliased declaration here");
    }
}
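
/* For example (illustrative user code, not part of this file):

     long my_resolver (void);			// wrong: not a pointer type
     int f (int) __attribute__ ((ifunc ("my_resolver")));

   elicits the "must return" error above, while a resolver returning an
   incompatible function pointer type only gets the -Wattribute-alias
   "should return" warning (and one returning void* is diagnosed only
   with -Wextra).  */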

/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	      node->transparent_alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  maybe_diag_incompatible_alias (p->decl, target_node->decl);

	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else if (VAR_P (p->decl)
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  error ("%q+D alias between function and variable is not supported",
		 p->decl);
	  inform (DECL_SOURCE_LOCATION (target_node->decl),
		  "aliased declaration here");

	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
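
/* The alias pairs handled above come from declarations such as
   (illustrative user code):

     void target_fn (void) { }
     void alias_fn (void) __attribute__ ((alias ("target_fn")));

   The front end records (alias_fn, "target_fn") in ALIAS_PAIRS; here the
   pair becomes a cgraph alias once the target's definition is known.  */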


/* Figure out what functions we want to assemble.  */

static void
mark_functions_to_output (void)
{
  bool check_same_comdat_groups = false;
  cgraph_node *node;

  if (flag_checking)
    FOR_EACH_FUNCTION (node)
      gcc_assert (!node->process);

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
	  if (flag_checking)
	    check_same_comdat_groups = true;
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
	  if (flag_checking
	      && !node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));
	}
    }
  if (flag_checking && check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
}

/* DECL is a FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.  IN_SSA is true if the gimple is in SSA.

   Set current_function_decl and cfun to a newly constructed empty function
   body.  Return the basic block in the function body.  */

basic_block
init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
{
  basic_block bb;
  edge e;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);

  if (in_ssa)
    {
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;

  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly.  */
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
  bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  bb->count = count;
  e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  e->probability = profile_probability::always ();
  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  e->probability = profile_probability::always ();
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  return bb;
}
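
/* A minimal usage sketch (hypothetical caller; FNDECL assumed to be a
   fresh FUNCTION_DECL with no body yet):

     basic_block bb
       = init_lowered_empty_function (fndecl, true,
				      profile_count::uninitialized ());
     gimple_stmt_iterator gsi = gsi_start_bb (bb);
     // ... build the body by inserting statements at GSI ...

   This is how the GIMPLE path of expand_thunk obtains an empty SSA body
   to fill in when a thunk cannot be emitted directly as assembly.  */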

/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  */

tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gassign *stmt;
  tree ret;

  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
	       (ptr, fold_build_pointer_plus_hwi_loc (input_location,
						      ptr,
						      fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp
	= create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (VAR_P (ptr))
	ptrtmp = ptr;
      else
	{
	  ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
	  stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1719
1720 /* Expand thunk NODE to gimple if possible.
1721    When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
1722    no assembler is produced.
1723    When OUTPUT_ASM_THUNKS is true, also produce assembler for
1724    thunks that are not lowered.  */
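/* For background, thunks most commonly arise from C++ multiple
   inheritance.  A minimal illustrative example (types invented here):

     struct A { int a; virtual void f (); };
     struct B { int b; virtual void g (); };
     struct C : A, B { void g (); };

   Calling g through a B* that really points into a C object must first
   adjust `this' by the offset of the B subobject within C before
   jumping to C::g; that adjustment is the thunk.  */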
1725
1726 bool
1727 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1728 {
1729 bool this_adjusting = thunk.this_adjusting;
1730 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1731 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1732 tree virtual_offset = NULL;
1733 tree alias = callees->callee->decl;
1734 tree thunk_fndecl = decl;
1735 tree a;
1736
1737 /* An instrumentation thunk is the same function with a different
1738    signature; it never needs to be expanded.  */
1739 if (thunk.add_pointer_bounds_args)
1740 return false;
1741
1742 if (!force_gimple_thunk && this_adjusting
1743 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1744 virtual_value, alias))
1745 {
1746 const char *fnname;
1747 tree fn_block;
1748 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1749
1750 if (!output_asm_thunks)
1751 {
1752 analyzed = true;
1753 return false;
1754 }
1755
1756 if (in_lto_p)
1757 get_untransformed_body ();
1758 a = DECL_ARGUMENTS (thunk_fndecl);
1759
1760 current_function_decl = thunk_fndecl;
1761
1762 /* Ensure thunks are emitted in their correct sections. */
1763 resolve_unique_section (thunk_fndecl, 0,
1764 flag_function_sections);
1765
1766 DECL_RESULT (thunk_fndecl)
1767 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1768 RESULT_DECL, 0, restype);
1769 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1770 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1771
1772 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1773 create one. */
1774 fn_block = make_node (BLOCK);
1775 BLOCK_VARS (fn_block) = a;
1776 DECL_INITIAL (thunk_fndecl) = fn_block;
1777 BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1778 allocate_struct_function (thunk_fndecl, false);
1779 init_function_start (thunk_fndecl);
1780 cfun->is_thunk = 1;
1781 insn_locations_init ();
1782 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1783 prologue_location = curr_insn_location ();
1784 assemble_start_function (thunk_fndecl, fnname);
1785
1786 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1787 fixed_offset, virtual_value, alias);
1788
1789 assemble_end_function (thunk_fndecl, fnname);
1790 insn_locations_finalize ();
1791 init_insn_lengths ();
1792 free_after_compilation (cfun);
1793 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1794 thunk.thunk_p = false;
1795 analyzed = false;
1796 }
1797 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1798 {
1799 error ("generic thunk code fails for method %qD which uses %<...%>",
1800 thunk_fndecl);
1801 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1802 analyzed = true;
1803 return false;
1804 }
1805 else
1806 {
1807 tree restype;
1808 basic_block bb, then_bb, else_bb, return_bb;
1809 gimple_stmt_iterator bsi;
1810 int nargs = 0;
1811 tree arg;
1812 int i;
1813 tree resdecl;
1814 tree restmp = NULL;
1815 tree resbnd = NULL;
1816
1817 gcall *call;
1818 greturn *ret;
1819 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1820
1821 /* We may be called from create_wrapper, which releases the body
1822    except for DECL_ARGUMENTS.  In that case force_gimple_thunk is true.  */
1823 if (in_lto_p && !force_gimple_thunk)
1824 get_untransformed_body ();
1825 a = DECL_ARGUMENTS (thunk_fndecl);
1826
1827 current_function_decl = thunk_fndecl;
1828
1829 /* Ensure thunks are emitted in their correct sections. */
1830 resolve_unique_section (thunk_fndecl, 0,
1831 flag_function_sections);
1832
1833 DECL_IGNORED_P (thunk_fndecl) = 1;
1834 bitmap_obstack_initialize (NULL);
1835
1836 if (thunk.virtual_offset_p)
1837 virtual_offset = size_int (virtual_value);
1838
1839 /* Build the return declaration for the function. */
1840 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1841 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1842 {
1843 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1844 DECL_ARTIFICIAL (resdecl) = 1;
1845 DECL_IGNORED_P (resdecl) = 1;
1846 DECL_RESULT (thunk_fndecl) = resdecl;
1847 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1848 }
1849 else
1850 resdecl = DECL_RESULT (thunk_fndecl);
1851
1852 profile_count cfg_count = count;
1853 if (!cfg_count.initialized_p ())
1854 cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
1855
1856 bb = then_bb = else_bb = return_bb
1857 = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
1858
1859 bsi = gsi_start_bb (bb);
1860
1861 /* Build call to the function being thunked. */
1862 if (!VOID_TYPE_P (restype)
1863 && (!alias_is_noreturn
1864 || TREE_ADDRESSABLE (restype)
1865 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
1866 {
1867 if (DECL_BY_REFERENCE (resdecl))
1868 {
1869 restmp = gimple_fold_indirect_ref (resdecl);
1870 if (!restmp)
1871 restmp = build2 (MEM_REF,
1872 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1873 resdecl,
1874 build_int_cst (TREE_TYPE
1875 (DECL_RESULT (alias)), 0));
1876 }
1877 else if (!is_gimple_reg_type (restype))
1878 {
1879 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1880 {
1881 restmp = resdecl;
1882
1883 if (VAR_P (restmp))
1884 add_local_decl (cfun, restmp);
1885 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1886 }
1887 else
1888 restmp = create_tmp_var (restype, "retval");
1889 }
1890 else
1891 restmp = create_tmp_reg (restype, "retval");
1892 }
1893
1894 for (arg = a; arg; arg = DECL_CHAIN (arg))
1895 nargs++;
1896 auto_vec<tree> vargs (nargs);
1897 i = 0;
1898 arg = a;
1899 if (this_adjusting)
1900 {
1901 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1902 virtual_offset));
1903 arg = DECL_CHAIN (a);
1904 i = 1;
1905 }
1906
1907 if (nargs)
1908 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1909 {
1910 tree tmp = arg;
1911 if (VECTOR_TYPE_P (TREE_TYPE (arg))
1912 || TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
1913 DECL_GIMPLE_REG_P (arg) = 1;
1914
1915 if (!is_gimple_val (arg))
1916 {
1917 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1918 (TREE_TYPE (arg)), "arg");
1919 gimple *stmt = gimple_build_assign (tmp, arg);
1920 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1921 }
1922 vargs.quick_push (tmp);
1923 }
1924 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1925 callees->call_stmt = call;
1926 gimple_call_set_from_thunk (call, true);
1927 gimple_call_set_with_bounds (call, instrumentation_clone);
1928
1929 /* Return slot optimization is always possible and in fact required
1930    to return values with DECL_BY_REFERENCE.  */
1931 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
1932 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
1933 || DECL_BY_REFERENCE (resdecl)))
1934 gimple_call_set_return_slot_opt (call, true);
1935
1936 if (restmp)
1937 {
1938 gimple_call_set_lhs (call, restmp);
1939 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1940 TREE_TYPE (TREE_TYPE (alias))));
1941 }
1942 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1943 if (!alias_is_noreturn)
1944 {
1945 if (instrumentation_clone
1946 && !DECL_BY_REFERENCE (resdecl)
1947 && restmp
1948 && BOUNDED_P (restmp))
1949 {
1950 resbnd = chkp_insert_retbnd_call (NULL, restmp, &bsi);
1951 create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi))),
1952 as_a <gcall *> (gsi_stmt (bsi)),
1953 callees->count);
1954 }
1955
1956 if (restmp && !this_adjusting
1957 && (fixed_offset || virtual_offset))
1958 {
1959 tree true_label = NULL_TREE;
1960
1961 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1962 {
1963 gimple *stmt;
1964 edge e;
1965 /* If the return type is a pointer, we need to
1966 protect against NULL. We know there will be an
1967 adjustment, because that's why we're emitting a
1968 thunk. */
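/* Roughly (a sketch of the CFG built below, not literal source):

     if (restmp != NULL)           // bb: test the returned pointer
       restmp = ADJUST (restmp);   // then_bb: apply the adjustment
     else
       restmp = NULL;              // else_bb: keep the null pointer
     return restmp;                // return_bb
   */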
1969 then_bb = create_basic_block (NULL, bb);
1970 then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
1971 return_bb = create_basic_block (NULL, then_bb);
1972 return_bb->count = cfg_count;
1973 else_bb = create_basic_block (NULL, else_bb);
1974 else_bb->count = cfg_count.apply_scale (1, 16);
1975 add_bb_to_loop (then_bb, bb->loop_father);
1976 add_bb_to_loop (return_bb, bb->loop_father);
1977 add_bb_to_loop (else_bb, bb->loop_father);
1978 remove_edge (single_succ_edge (bb));
1979 true_label = gimple_block_label (then_bb);
1980 stmt = gimple_build_cond (NE_EXPR, restmp,
1981 build_zero_cst (TREE_TYPE (restmp)),
1982 NULL_TREE, NULL_TREE);
1983 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1984 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1985 e->probability = profile_probability::guessed_always ()
1986 .apply_scale (1, 16);
1987 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1988 e->probability = profile_probability::guessed_always ()
1989 .apply_scale (1, 16);
1990 make_single_succ_edge (return_bb,
1991 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1992 make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
1993 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1994 e->probability = profile_probability::always ();
1995 bsi = gsi_last_bb (then_bb);
1996 }
1997
1998 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1999 fixed_offset, virtual_offset);
2000 if (true_label)
2001 {
2002 gimple *stmt;
2003 bsi = gsi_last_bb (else_bb);
2004 stmt = gimple_build_assign (restmp,
2005 build_zero_cst (TREE_TYPE (restmp)));
2006 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2007 bsi = gsi_last_bb (return_bb);
2008 }
2009 }
2010 else
2011 gimple_call_set_tail (call, true);
2012
2013 /* Build return value. */
2014 if (!DECL_BY_REFERENCE (resdecl))
2015 ret = gimple_build_return (restmp);
2016 else
2017 ret = gimple_build_return (resdecl);
2018 gimple_return_set_retbnd (ret, resbnd);
2019
2020 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2021 }
2022 else
2023 {
2024 gimple_call_set_tail (call, true);
2025 remove_edge (single_succ_edge (bb));
2026 }
2027
2028 cfun->gimple_df->in_ssa_p = true;
2029 update_max_bb_count ();
2030 profile_status_for_fn (cfun)
2031 = cfg_count.initialized_p () && cfg_count.ipa_p ()
2032 ? PROFILE_READ : PROFILE_GUESSED;
2033 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
2034 TREE_ASM_WRITTEN (thunk_fndecl) = false;
2035 delete_unreachable_blocks ();
2036 update_ssa (TODO_update_ssa);
2037 checking_verify_flow_info ();
2038 free_dominance_info (CDI_DOMINATORS);
2039
2040 /* Since we want to emit the thunk, we explicitly mark its name as
2041 referenced. */
2042 thunk.thunk_p = false;
2043 lowered = true;
2044 bitmap_obstack_release (NULL);
2045 }
2046 current_function_decl = NULL;
2047 set_cfun (NULL);
2048 return true;
2049 }
2050
2051 /* Assemble thunks and aliases associated with the node.  */
2052
2053 void
2054 cgraph_node::assemble_thunks_and_aliases (void)
2055 {
2056 cgraph_edge *e;
2057 ipa_ref *ref;
2058
2059 for (e = callers; e;)
2060 if (e->caller->thunk.thunk_p
2061 && !e->caller->global.inlined_to
2062 && !e->caller->thunk.add_pointer_bounds_args)
2063 {
2064 cgraph_node *thunk = e->caller;
2065
2066 e = e->next_caller;
2067 thunk->expand_thunk (true, false);
2068 thunk->assemble_thunks_and_aliases ();
2069 }
2070 else
2071 e = e->next_caller;
2072
2073 FOR_EACH_ALIAS (this, ref)
2074 {
2075 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2076 if (!alias->transparent_alias)
2077 {
2078 bool saved_written = TREE_ASM_WRITTEN (decl);
2079
2080 /* Force assemble_alias to really output the alias this time instead
2081    of buffering it in the alias pairs list.  */
2082 TREE_ASM_WRITTEN (decl) = 1;
2083 do_assemble_alias (alias->decl,
2084 DECL_ASSEMBLER_NAME (decl));
2085 alias->assemble_thunks_and_aliases ();
2086 TREE_ASM_WRITTEN (decl) = saved_written;
2087 }
2088 }
2089 }
2090
2091 /* Expand function specified by node. */
2092
2093 void
2094 cgraph_node::expand (void)
2095 {
2096 location_t saved_loc;
2097
2098 /* We ought not to compile any inline clones.  */
2099 gcc_assert (!global.inlined_to);
2100
2101 /* __RTL functions are compiled as soon as they are parsed, so don't
2102 do it again. */
2103 if (native_rtl_p ())
2104 return;
2105
2106 announce_function (decl);
2107 process = 0;
2108 gcc_assert (lowered);
2109 get_untransformed_body ();
2110
2111 /* Generate RTL for the body of DECL. */
2112
2113 timevar_push (TV_REST_OF_COMPILATION);
2114
2115 gcc_assert (symtab->global_info_ready);
2116
2117 /* Initialize the default bitmap obstack. */
2118 bitmap_obstack_initialize (NULL);
2119
2120 /* Initialize the RTL code for the function. */
2121 saved_loc = input_location;
2122 input_location = DECL_SOURCE_LOCATION (decl);
2123
2124 gcc_assert (DECL_STRUCT_FUNCTION (decl));
2125 push_cfun (DECL_STRUCT_FUNCTION (decl));
2126 init_function_start (decl);
2127
2128 gimple_register_cfg_hooks ();
2129
2130 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation.  */
2131
2132 execute_all_ipa_transforms ();
2133
2134 /* Perform all tree transforms and optimizations. */
2135
2136 /* Signal the start of passes. */
2137 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2138
2139 execute_pass_list (cfun, g->get_passes ()->all_passes);
2140
2141 /* Signal the end of passes. */
2142 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2143
2144 bitmap_obstack_release (&reg_obstack);
2145
2146 /* Release the default bitmap obstack. */
2147 bitmap_obstack_release (NULL);
2148
2149 /* If requested, warn about function definitions where the function will
2150 return a value (usually of some struct or union type) which itself will
2151 take up a lot of stack space. */
2152 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2153 {
2154 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2155
2156 if (ret_type && TYPE_SIZE_UNIT (ret_type)
2157 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2158 && compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2159 larger_than_size) > 0)
2160 {
2161 unsigned int size_as_int
2162 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2163
2164 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2165 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
2166 decl, size_as_int);
2167 else
2168 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
2169 decl, larger_than_size);
2170 }
2171 }
2172
2173 gimple_set_body (decl, NULL);
2174 if (DECL_STRUCT_FUNCTION (decl) == 0
2175 && !cgraph_node::get (decl)->origin)
2176 {
2177 /* Stop pointing to the local nodes about to be freed.
2178 But DECL_INITIAL must remain nonzero so we know this
2179 was an actual function definition.
2180 For a nested function, this is done in c_pop_function_context.
2181 If rest_of_compilation set this to 0, leave it 0. */
2182 if (DECL_INITIAL (decl) != 0)
2183 DECL_INITIAL (decl) = error_mark_node;
2184 }
2185
2186 input_location = saved_loc;
2187
2188 ggc_collect ();
2189 timevar_pop (TV_REST_OF_COMPILATION);
2190
2191 /* Make sure that BE didn't give up on compiling. */
2192 gcc_assert (TREE_ASM_WRITTEN (decl));
2193 if (cfun)
2194 pop_cfun ();
2195
2196 /* It would make a lot more sense to output thunks before the function body
2197    to get more forward and fewer backward jumps.  This however would need
2198    solving a problem with comdats.  See PR48668.  Also aliases must come
2199    after the function itself to keep one-pass assemblers, like the one on
2200    AIX, happy.  See PR 50689.  FIXME: Perhaps thunks should be moved before
2201    the function IFF they are not in comdat groups.  */
2202 assemble_thunks_and_aliases ();
2203 release_body ();
2204 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
2205 points to the dead function body. */
2206 remove_callees ();
2207 remove_all_references ();
2208 }
2209
2210 /* Node comparator responsible for producing the order that corresponds
2211    to the time at which a function was first executed.  */
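/* Note that expand_all_functions below consumes the sorted array from
   its end, so the order produced by this comparator is the reverse of
   the final expansion order.  */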
2212
2213 static int
2214 node_cmp (const void *pa, const void *pb)
2215 {
2216 const cgraph_node *a = *(const cgraph_node * const *) pa;
2217 const cgraph_node *b = *(const cgraph_node * const *) pb;
2218
2219 /* Functions with time profile must be before those without profile.  */
2220 if (!a->tp_first_run || !b->tp_first_run)
2221 return a->tp_first_run - b->tp_first_run;
2222
2223 return a->tp_first_run != b->tp_first_run
2224 ? b->tp_first_run - a->tp_first_run
2225 : b->order - a->order;
2226 }
2227
2228 /* Expand all functions that must be output.
2229
2230 Attempt to topologically sort the nodes so function is output when
2231 all called functions are already assembled to allow data to be
2232 propagated across the callgraph. Use a stack to get smaller distance
2233 between a function and its callees (later we may choose to use a more
2234 sophisticated algorithm for function reordering; we will likely want
2235 to use subsections to make the output functions appear in top-down
2236 order). */
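/* In outline (a sketch, not literal code):

     order = ipa_reverse_postorder (...);   // roughly callers before callees
     keep only the nodes with node->process set;
     if (flag_profile_reorder_functions)
       qsort (order, ..., node_cmp);
     walk ORDER from the end, calling node->expand () on each node;

   Walking the array from the end means callees tend to be expanded
   before their callers, as described above.  */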
2237
2238 static void
2239 expand_all_functions (void)
2240 {
2241 cgraph_node *node;
2242 cgraph_node **order = XCNEWVEC (cgraph_node *,
2243 symtab->cgraph_count);
2244 unsigned int expanded_func_count = 0, profiled_func_count = 0;
2245 int order_pos, new_order_pos = 0;
2246 int i;
2247
2248 order_pos = ipa_reverse_postorder (order);
2249 gcc_assert (order_pos == symtab->cgraph_count);
2250
2251 /* The garbage collector may remove inline clones we eliminate during
2252    optimization, so we must be sure not to reference them.  */
2253 for (i = 0; i < order_pos; i++)
2254 if (order[i]->process)
2255 order[new_order_pos++] = order[i];
2256
2257 if (flag_profile_reorder_functions)
2258 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2259
2260 for (i = new_order_pos - 1; i >= 0; i--)
2261 {
2262 node = order[i];
2263
2264 if (node->process)
2265 {
2266 expanded_func_count++;
2267 if (node->tp_first_run)
2268 profiled_func_count++;
2269
2270 if (symtab->dump_file)
2271 fprintf (symtab->dump_file,
2272 "Time profile order in expand_all_functions:%s:%d\n",
2273 node->asm_name (), node->tp_first_run);
2274 node->process = 0;
2275 node->expand ();
2276 }
2277 }
2278
2279 if (dump_file)
2280 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2281 main_input_filename, profiled_func_count, expanded_func_count);
2282
2283 if (symtab->dump_file && flag_profile_reorder_functions)
2284 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2285 profiled_func_count, expanded_func_count);
2286
2287 symtab->process_new_functions ();
2288 free_gimplify_stack ();
2289
2290 free (order);
2291 }
2292
2293 /* This is used to sort the node types by the cgraph order number. */
2294
2295 enum cgraph_order_sort_kind
2296 {
2297 ORDER_UNDEFINED = 0,
2298 ORDER_FUNCTION,
2299 ORDER_VAR,
2300 ORDER_VAR_UNDEF,
2301 ORDER_ASM
2302 };
2303
2304 struct cgraph_order_sort
2305 {
2306 enum cgraph_order_sort_kind kind;
2307 union
2308 {
2309 cgraph_node *f;
2310 varpool_node *v;
2311 asm_node *a;
2312 } u;
2313 };
2314
2315 /* Output all functions, variables, and asm statements in the order
2316 according to their order fields, which is the order in which they
2317 appeared in the file. This implements -fno-toplevel-reorder. In
2318 this mode we may output functions and variables which don't really
2319 need to be output. */
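/* For illustration, with -fno-toplevel-reorder a unit such as

     int x = 1;
     asm ("# marker");
     static int f (void) { return x; }

   is emitted as the variable, the toplevel asm, and then the function,
   in exactly that source order, even though f and x might otherwise be
   dropped or reordered.  */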
2320
2321 static void
2322 output_in_order (void)
2323 {
2324 int max;
2325 cgraph_order_sort *nodes;
2326 int i;
2327 cgraph_node *pf;
2328 varpool_node *pv;
2329 asm_node *pa;
2330 max = symtab->order;
2331 nodes = XCNEWVEC (cgraph_order_sort, max);
2332
2333 FOR_EACH_DEFINED_FUNCTION (pf)
2334 {
2335 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2336 {
2337 if (!pf->no_reorder)
2338 continue;
2339 i = pf->order;
2340 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2341 nodes[i].kind = ORDER_FUNCTION;
2342 nodes[i].u.f = pf;
2343 }
2344 }
2345
2346 /* There is a similar loop in symbol_table::output_variables.
2347 Please keep them in sync. */
2348 FOR_EACH_VARIABLE (pv)
2349 {
2350 if (!pv->no_reorder)
2351 continue;
2352 if (DECL_HARD_REGISTER (pv->decl)
2353 || DECL_HAS_VALUE_EXPR_P (pv->decl))
2354 continue;
2355 i = pv->order;
2356 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2357 nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
2358 nodes[i].u.v = pv;
2359 }
2360
2361 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2362 {
2363 i = pa->order;
2364 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2365 nodes[i].kind = ORDER_ASM;
2366 nodes[i].u.a = pa;
2367 }
2368
2369 /* In no-toplevel-reorder mode we output all statics; mark them as needed.  */
2370
2371 for (i = 0; i < max; ++i)
2372 if (nodes[i].kind == ORDER_VAR)
2373 nodes[i].u.v->finalize_named_section_flags ();
2374
2375 for (i = 0; i < max; ++i)
2376 {
2377 switch (nodes[i].kind)
2378 {
2379 case ORDER_FUNCTION:
2380 nodes[i].u.f->process = 0;
2381 nodes[i].u.f->expand ();
2382 break;
2383
2384 case ORDER_VAR:
2385 nodes[i].u.v->assemble_decl ();
2386 break;
2387
2388 case ORDER_VAR_UNDEF:
2389 assemble_undefined_decl (nodes[i].u.v->decl);
2390 break;
2391
2392 case ORDER_ASM:
2393 assemble_asm (nodes[i].u.a->asm_str);
2394 break;
2395
2396 case ORDER_UNDEFINED:
2397 break;
2398
2399 default:
2400 gcc_unreachable ();
2401 }
2402 }
2403
2404 symtab->clear_asm_symbols ();
2405
2406 free (nodes);
2407 }
2408
2409 static void
2410 ipa_passes (void)
2411 {
2412 gcc::pass_manager *passes = g->get_passes ();
2413
2414 set_cfun (NULL);
2415 current_function_decl = NULL;
2416 gimple_register_cfg_hooks ();
2417 bitmap_obstack_initialize (NULL);
2418
2419 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2420
2421 if (!in_lto_p)
2422 {
2423 execute_ipa_pass_list (passes->all_small_ipa_passes);
2424 if (seen_error ())
2425 return;
2426 }
2427
2428 /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2429    devirtualization and other changes where removal iterates.  */
2430 symtab->remove_unreachable_nodes (symtab->dump_file);
2431
2432 /* If pass_all_early_optimizations was not scheduled, the state of
2433 the cgraph will not be properly updated. Update it now. */
2434 if (symtab->state < IPA_SSA)
2435 symtab->state = IPA_SSA;
2436
2437 if (!in_lto_p)
2438 {
2439 /* Generate coverage variables and constructors. */
2440 coverage_finish ();
2441
2442 /* Process new functions added. */
2443 set_cfun (NULL);
2444 current_function_decl = NULL;
2445 symtab->process_new_functions ();
2446
2447 execute_ipa_summary_passes
2448 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2449 }
2450
2451 /* Some targets need to handle LTO assembler output specially. */
2452 if (flag_generate_lto || flag_generate_offload)
2453 targetm.asm_out.lto_start ();
2454
2455 if (!in_lto_p)
2456 {
2457 if (g->have_offload)
2458 {
2459 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2460 lto_stream_offload_p = true;
2461 ipa_write_summaries ();
2462 lto_stream_offload_p = false;
2463 }
2464 if (flag_lto)
2465 {
2466 section_name_prefix = LTO_SECTION_NAME_PREFIX;
2467 lto_stream_offload_p = false;
2468 ipa_write_summaries ();
2469 }
2470 }
2471
2472 if (flag_generate_lto || flag_generate_offload)
2473 targetm.asm_out.lto_end ();
2474
2475 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2476 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2477 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2478
2479 bitmap_obstack_release (NULL);
2480 }
2481
2482
2483 /* Return the symbol that DECL's "alias" attribute says it aliases.  */
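/* For instance, given an illustrative declaration

     void f (void) __attribute__ ((alias ("target_fn")));

   this returns the identifier "target_fn".  */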
2484
2485 static tree
2486 get_alias_symbol (tree decl)
2487 {
2488 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2489 return get_identifier (TREE_STRING_POINTER
2490 (TREE_VALUE (TREE_VALUE (alias))));
2491 }
2492
2493
2494 /* Weakrefs may be associated with external decls and thus not output
2495    at expansion time.  Emit all necessary aliases.  */
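/* A weakref in source form, for illustration:

     static void f (void) __attribute__ ((weakref ("bar")));

   f becomes a weak reference to bar, and bar need not be defined in the
   current compilation unit.  */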
2496
2497 void
2498 symbol_table::output_weakrefs (void)
2499 {
2500 symtab_node *node;
2501 cgraph_node *cnode;
2502 FOR_EACH_SYMBOL (node)
2503 if (node->alias
2504 && !TREE_ASM_WRITTEN (node->decl)
2505 && (!(cnode = dyn_cast <cgraph_node *> (node))
2506 || !cnode->instrumented_version
2507 || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
2508 && node->weakref)
2509 {
2510 tree target;
2511
2512 /* Weakrefs are special in not requiring a target definition in the
2513    current compilation unit.  It is thus a bit hard to work out what
2514    we want to alias.
2515    When the alias target is defined, we need to fetch it from the symtab
2516    reference; otherwise it is pointed to by alias_target.  */
2517 if (node->alias_target)
2518 target = (DECL_P (node->alias_target)
2519 ? DECL_ASSEMBLER_NAME (node->alias_target)
2520 : node->alias_target);
2521 else if (node->analyzed)
2522 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2523 else
2524 {
2525 gcc_unreachable ();
2526 target = get_alias_symbol (node->decl);
2527 }
2528 do_assemble_alias (node->decl, target);
2529 }
2530 }
2531
2532 /* Perform simple optimizations based on callgraph. */
2533
2534 void
2535 symbol_table::compile (void)
2536 {
2537 if (seen_error ())
2538 return;
2539
2540 symtab_node::checking_verify_symtab_nodes ();
2541
2542 timevar_push (TV_CGRAPHOPT);
2543 if (pre_ipa_mem_report)
2544 {
2545 fprintf (stderr, "Memory consumption before IPA\n");
2546 dump_memory_report (false);
2547 }
2548 if (!quiet_flag)
2549 fprintf (stderr, "Performing interprocedural optimizations\n");
2550 state = IPA;
2551
2552 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2553 if (flag_generate_lto || flag_generate_offload)
2554 lto_streamer_hooks_init ();
2555
2556 /* Don't run the IPA passes if there were any errors or sorry messages.  */
2557 if (!seen_error ())
2558 ipa_passes ();
2559
2560 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2561 if (seen_error ()
2562 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2563 {
2564 timevar_pop (TV_CGRAPHOPT);
2565 return;
2566 }
2567
2568 global_info_ready = true;
2569 if (dump_file)
2570 {
2571 fprintf (dump_file, "Optimized ");
2572 symtab->dump (dump_file);
2573 }
2574 if (post_ipa_mem_report)
2575 {
2576 fprintf (stderr, "Memory consumption after IPA\n");
2577 dump_memory_report (false);
2578 }
2579 timevar_pop (TV_CGRAPHOPT);
2580
2581 /* Output everything. */
2582 switch_to_section (text_section);
2583 (*debug_hooks->assembly_start) ();
2584 if (!quiet_flag)
2585 fprintf (stderr, "Assembling functions:\n");
2586 symtab_node::checking_verify_symtab_nodes ();
2587
2588 bitmap_obstack_initialize (NULL);
2589 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2590 bitmap_obstack_release (NULL);
2591 mark_functions_to_output ();
2592
2593 /* When weakref support is missing, we automatically translate all
2594    references to NODE to references to its ultimate alias target.
2595    The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2596    TREE_CHAIN.
2597 
2598    Set up this mapping before we output any assembler, but once we are sure
2599    that all symbol renaming is done.
2600 
2601    FIXME: All this ugliness can go away if we just do renaming at gimple
2602    level by physically rewriting the IL.  At the moment we can only redirect
2603    calls, so we need infrastructure for renaming references as well.  */
2604 #ifndef ASM_OUTPUT_WEAKREF
2605 symtab_node *node;
2606
2607 FOR_EACH_SYMBOL (node)
2608 if (node->alias
2609 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2610 {
2611 IDENTIFIER_TRANSPARENT_ALIAS
2612 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2613 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2614 = (node->alias_target ? node->alias_target
2615 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2616 }
2617 #endif
2618
2619 state = EXPANSION;
2620
2621 /* First output asm statements and anything ordered.  The process
2622    flag is cleared for these nodes, so we skip them later.  */
2623 output_in_order ();
2624 expand_all_functions ();
2625 output_variables ();
2626
2627 process_new_functions ();
2628 state = FINISHED;
2629 output_weakrefs ();
2630
2631 if (dump_file)
2632 {
2633 fprintf (dump_file, "\nFinal ");
2634 symtab->dump (dump_file);
2635 }
2636 if (!flag_checking)
2637 return;
2638 symtab_node::verify_symtab_nodes ();
2639 /* Double check that all inline clones are gone and that all
2640 function bodies have been released from memory. */
2641 if (!seen_error ())
2642 {
2643 cgraph_node *node;
2644 bool error_found = false;
2645
2646 FOR_EACH_DEFINED_FUNCTION (node)
2647 if (node->global.inlined_to
2648 || gimple_has_body_p (node->decl))
2649 {
2650 error_found = true;
2651 node->debug ();
2652 }
2653 if (error_found)
2654 internal_error ("nodes with unreleased memory found");
2655 }
2656 }
2657
2658
2659 /* Analyze the whole compilation unit once it is parsed completely. */
2660
2661 void
2662 symbol_table::finalize_compilation_unit (void)
2663 {
2664 timevar_push (TV_CGRAPH);
2665
2666 /* If we're here there's no current function anymore. Some frontends
2667 are lazy in clearing these. */
2668 current_function_decl = NULL;
2669 set_cfun (NULL);
2670
2671 /* Do not skip analyzing the functions if there were errors; we would
2672    miss diagnostics for subsequent functions otherwise.  */
2673
2674 /* Emit size functions we didn't inline. */
2675 finalize_size_functions ();
2676
2677 /* Mark alias targets necessary and emit diagnostics. */
2678 handle_alias_pairs ();
2679
2680 if (!quiet_flag)
2681 {
2682 fprintf (stderr, "\nAnalyzing compilation unit\n");
2683 fflush (stderr);
2684 }
2685
2686 if (flag_dump_passes)
2687 dump_passes ();
2688
2689 /* Gimplify and lower all functions, compute reachability and
2690 remove unreachable nodes. */
2691 analyze_functions (/*first_time=*/true);
2692
2693 /* Mark alias targets necessary and emit diagnostics. */
2694 handle_alias_pairs ();
2695
2696 /* Gimplify and lower thunks. */
2697 analyze_functions (/*first_time=*/false);
2698
2699 /* Offloading requires LTO infrastructure. */
2700 if (!in_lto_p && g->have_offload)
2701 flag_generate_offload = 1;
2702
2703 if (!seen_error ())
2704 {
2705 /* Emit early debug for reachable functions, and by consequence,
2706 locally scoped symbols. */
2707 struct cgraph_node *cnode;
2708 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
2709 (*debug_hooks->early_global_decl) (cnode->decl);
2710
2711 /* Clean up anything that needs cleaning up after initial debug
2712 generation. */
2713 (*debug_hooks->early_finish) (main_input_filename);
2714 }
2715
2716 /* Finally drive the pass manager. */
2717 compile ();
2718
2719 timevar_pop (TV_CGRAPH);
2720 }
2721
2722 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2723 within the same process. For use by toplev::finalize. */
2724
2725 void
2726 cgraphunit_c_finalize (void)
2727 {
2728 gcc_assert (cgraph_new_nodes.length () == 0);
2729 cgraph_new_nodes.truncate (0);
2730
2731 vtable_entry_type = NULL;
2732 queued_nodes = &symtab_terminator;
2733
2734 first_analyzed = NULL;
2735 first_analyzed_var = NULL;
2736 }
2737
2738 /* Create a wrapper around this cgraph_node that simply calls TARGET.
2739    A thunk is used for this kind of wrapper method.  */
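/* Conceptually (a sketch, not the actual representation) the result
   behaves like

     ret_type this_fn (args...) { return target (args...); }

   except that the body is represented as a gimple thunk rather than
   parsed source.  */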
2740
2741 void
2742 cgraph_node::create_wrapper (cgraph_node *target)
2743 {
2744 /* Preserve DECL_RESULT so we get the right by-reference flag.  */
2745 tree decl_result = DECL_RESULT (decl);
2746
2747 /* Remove the function's body but keep its arguments so they can be
2748    reused for the thunk.  */
2749 release_body (true);
2750 reset ();
2751
2752 DECL_UNINLINABLE (decl) = false;
2753 DECL_RESULT (decl) = decl_result;
2754 DECL_INITIAL (decl) = NULL;
2755 allocate_struct_function (decl, false);
2756 set_cfun (NULL);
2757
2758 /* Turn alias into thunk and expand it into GIMPLE representation. */
2759 definition = true;
2760
2761 memset (&thunk, 0, sizeof (cgraph_thunk_info));
2762 thunk.thunk_p = true;
2763 create_edge (target, NULL, count);
2764 callees->can_throw_external = !TREE_NOTHROW (target->decl);
2765
2766 tree arguments = DECL_ARGUMENTS (decl);
2767
2768 while (arguments)
2769 {
2770 TREE_ADDRESSABLE (arguments) = false;
2771 arguments = TREE_CHAIN (arguments);
2772 }
2773
2774 expand_thunk (false, true);
2775
2776 /* Inline summary set-up. */
2777 analyze ();
2778 inline_analyze_function (this);
2779 }
2780
2781 #include "gt-cgraphunit.h"
2782