1 /* Driver of optimization process
2    Copyright (C) 2003-2016 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 /* This module implements main driver of compilation process.
22 
23    The main scope of this file is to act as an interface in between
24    tree based frontends and the backend.
25 
26    The front-end is supposed to use following functionality:
27 
28     - finalize_function
29 
30       This function is called once front-end has parsed whole body of function
31       and it is certain that the function body nor the declaration will change.
32 
33       (There is one exception needed for implementing GCC extern inline
34 	function.)
35 
36     - varpool_finalize_decl
37 
38       This function has same behavior as the above but is used for static
39       variables.
40 
41     - add_asm_node
42 
43       Insert new toplevel ASM statement
44 
45     - finalize_compilation_unit
46 
47       This function is called once (source level) compilation unit is finalized
48       and it will no longer change.
49 
50       The symbol table is constructed starting from the trivially needed
51       symbols finalized by the frontend.  Functions are lowered into
52       GIMPLE representation and callgraph/reference lists are constructed.
53       Those are used to discover other necessary functions and variables.
54 
55       At the end the bodies of unreachable functions are removed.
56 
57       The function can be called multiple times when multiple source level
58       compilation units are combined.
59 
60     - compile
61 
62       This passes control to the back-end.  Optimizations are performed and
63       final assembler is generated.  This is done in the following way. Note
64       that with link time optimization the process is split into three
65       stages (compile time, linktime analysis and parallel linktime as
      indicated below).
67 
68       Compile time:
69 
70 	1) Inter-procedural optimization.
71 	   (ipa_passes)
72 
73 	   This part is further split into:
74 
75 	   a) early optimizations. These are local passes executed in
76 	      the topological order on the callgraph.
77 
	      The purpose of early optimizations is to optimize away simple
79 	      things that may otherwise confuse IP analysis. Very simple
80 	      propagation across the callgraph is done i.e. to discover
81 	      functions without side effects and simple inlining is performed.
82 
83 	   b) early small interprocedural passes.
84 
85 	      Those are interprocedural passes executed only at compilation
	      time.  These include, for example, transactional memory lowering,
87 	      unreachable code removal and other simple transformations.
88 
89 	   c) IP analysis stage.  All interprocedural passes do their
90 	      analysis.
91 
92 	      Interprocedural passes differ from small interprocedural
93 	      passes by their ability to operate across whole program
94 	      at linktime.  Their analysis stage is performed early to
95 	      both reduce linking times and linktime memory usage by
96 	      not having to represent whole program in memory.
97 
	   d) LTO streaming.  When doing LTO, everything important gets
99 	      streamed into the object file.
100 
101        Compile time and or linktime analysis stage (WPA):
102 
103 	      At linktime units gets streamed back and symbol table is
104 	      merged.  Function bodies are not streamed in and not
105 	      available.
106 	   e) IP propagation stage.  All IP passes execute their
107 	      IP propagation. This is done based on the earlier analysis
108 	      without having function bodies at hand.
109 	   f) Ltrans streaming.  When doing WHOPR LTO, the program
	      is partitioned and streamed into multiple object files.
111 
112        Compile time and/or parallel linktime stage (ltrans)
113 
114 	      Each of the object files is streamed back and compiled
115 	      separately.  Now the function bodies becomes available
116 	      again.
117 
118 	 2) Virtual clone materialization
119 	    (cgraph_materialize_clone)
120 
	    IP passes can produce copies of existing functions (such
122 	    as versioned clones or inline clones) without actually
123 	    manipulating their bodies by creating virtual clones in
124 	    the callgraph. At this time the virtual clones are
125 	    turned into real functions
126 	 3) IP transformation
127 
128 	    All IP passes transform function bodies based on earlier
129 	    decision of the IP propagation.
130 
131 	 4) late small IP passes
132 
133 	    Simple IP passes working within single program partition.
134 
135 	 5) Expansion
136 	    (expand_all_functions)
137 
138 	    At this stage functions that needs to be output into
139 	    assembler are identified and compiled in topological order
140 	 6) Output of variables and aliases
141 	    Now it is known what variable references was not optimized
142 	    out and thus all variables are output to the file.
143 
144 	    Note that with -fno-toplevel-reorder passes 5 and 6
145 	    are combined together in cgraph_output_in_order.
146 
147    Finally there are functions to manipulate the callgraph from
148    backend.
149     - cgraph_add_new_function is used to add backend produced
150       functions introduced after the unit is finalized.
      The functions are enqueued for later processing and inserted
152       into callgraph with cgraph_process_new_functions.
153 
154     - cgraph_function_versioning
155 
156       produces a copy of function into new one (a version)
157       and apply simple transformations
158 */
159 
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h"     /* FIXME: For reg_obstack.  */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "ipa-prop.h"
194 #include "gimple-pretty-print.h"
195 #include "plugin.h"
196 #include "ipa-inline.h"
197 #include "ipa-utils.h"
198 #include "except.h"
199 #include "cfgloop.h"
200 #include "context.h"
201 #include "pass_manager.h"
202 #include "tree-nested.h"
203 #include "dbgcnt.h"
204 #include "tree-chkp.h"
205 #include "lto-section-names.h"
206 
/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
vec<cgraph_node *> cgraph_new_nodes;

/* Forward declarations of helpers defined later in this file.  */
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void handle_alias_pairs (void);

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;
218 
/* Determine if symbol declaration is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury.  */
bool
symtab_node::needed_p (void)
{
  /* Double check that no one output the function into assembly file
     early.  */
  gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
	               || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));

  /* Symbols without a definition cannot be needed to be output.  */
  if (!definition)
    return false;

  /* External symbols are provided by some other object file.  */
  if (DECL_EXTERNAL (decl))
    return false;

  /* If the user told us it is used, then it must be so.  */
  if (force_output)
    return true;

  /* ABI forced symbols are needed when they are external.  */
  if (forced_by_abi && TREE_PUBLIC (decl))
    return true;

  /* Keep constructors, destructors and virtual functions.  */
   if (TREE_CODE (decl) == FUNCTION_DECL
       && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
    return true;

  /* Externally visible variables must be output.  The exception is
     COMDAT variables that must be output only when they are needed.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  return false;
}
256 
/* Head and terminator of the queue of nodes to be processed while building
   callgraph.  The queue is threaded through the nodes' AUX pointers;
   SYMTAB_TERMINATOR acts as the end-of-queue sentinel.  */

static symtab_node symtab_terminator;
static symtab_node *queued_nodes = &symtab_terminator;

/* Add NODE to queue starting at QUEUED_NODES.
   The queue is linked via AUX pointers and terminated by a pointer to
   SYMTAB_TERMINATOR.  */

static void
enqueue_node (symtab_node *node)
{
  /* A non-NULL AUX means NODE is already in the queue.  */
  if (node->aux)
    return;
  gcc_checking_assert (queued_nodes);
  /* Push NODE at the head of the linked list.  */
  node->aux = queued_nodes;
  queued_nodes = node;
}
275 
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /*  Note that this queue may grow as it is being processed, as the new
      functions may generate new ones.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      /* What needs to be done depends on how far compilation progressed
	 at the time the function was added.  */
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization already started, do all essential
	     transformations that have been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Bring the body into SSA form via the early local passes if the
	     rest of the unit already is; otherwise just refresh the inline
	     summary for the new body.  */
	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summaries != NULL)
	    compute_inline_parameters (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  /* Everything queued has been handled; drop the vector.  */
  cgraph_new_nodes.release ();
}
344 
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

void
cgraph_node::reset (void)
{
  /* If process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&local, 0, sizeof (local));
  memset (&global, 0, sizeof (global));
  memset (&rtl, 0, sizeof (rtl));
  analyzed = false;
  definition = false;
  alias = false;
  transparent_alias = false;
  weakref = false;
  cpp_implicit_alias = false;

  /* Drop outgoing edges and references; they will be rebuilt when the
     new body is analyzed.  */
  remove_callees ();
  remove_all_references ();
}
379 
380 /* Return true when there are references to the node.  INCLUDE_SELF is
381    true if a self reference counts as a reference.  */
382 
383 bool
referred_to_p(bool include_self)384 symtab_node::referred_to_p (bool include_self)
385 {
386   ipa_ref *ref = NULL;
387 
388   /* See if there are any references at all.  */
389   if (iterate_referring (0, ref))
390     return true;
391   /* For functions check also calls.  */
392   cgraph_node *cn = dyn_cast <cgraph_node *> (this);
393   if (cn && cn->callers)
394     {
395       if (include_self)
396 	return true;
397       for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
398 	if (e->caller != this)
399 	  return true;
400     }
401   return false;
402 }
403 
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  /* A second definition: this is the GNU extern inline extension.
     Discard the old body and remember the redefinition.  */
  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) !=	FUNCTION_DECL);
      node->reset ();
      node->local.redefined_extern_inline = true;
    }

  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  /* A body with a CFG already built counts as lowered.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  if (!no_collect)
    ggc_collect ();

  /* While the symbol table is being built, queue anything already known
     to be needed or referred to.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
462 
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid the need for the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;

  /* Report what kind of body we were handed, for dump files.  */
  if (dump_file)
    {
      struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
      const char *function_type = ((gimple_has_body_p (fndecl))
				   ? (lowered
				      ? (gimple_in_ssa_p (fn)
					 ? "ssa gimple"
					 : "low gimple")
				      : "high gimple")
				   : "to-be-gimplified");
      fprintf (dump_file,
	       "Added new %s function %s to callgraph\n",
	       function_type,
	       fndecl_name (fndecl));
    }

  /* The amount of work needed depends on how far compilation progressed.  */
  switch (symtab->state)
    {
      case PARSING:
	cgraph_node::finalize_function (fndecl, false);
	break;
      case CONSTRUCTION:
	/* Just enqueue function to be processed at nearest occurrence.  */
	node = cgraph_node::get_create (fndecl);
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes.safe_push (node);
        break;

      case IPA:
      case IPA_SSA:
      case IPA_SSA_AFTER_INLINING:
      case EXPANSION:
	/* Bring the function into finalized state and enqueue for later
	   analyzing and compilation.  */
	node = cgraph_node::get_create (fndecl);
	node->local.local = false;
	node->definition = true;
	node->force_output = true;
	/* During expansion there is no later chance to lower the body,
	   so lower it right away.  */
	if (!lowered && symtab->state == EXPANSION)
	  {
	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	    gimple_register_cfg_hooks ();
	    bitmap_obstack_initialize (NULL);
	    execute_pass_list (cfun, passes->all_lowering_passes);
	    passes->execute_early_local_passes ();
	    bitmap_obstack_release (NULL);
	    pop_cfun ();

	    lowered = true;
	  }
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes.safe_push (node);
        break;

      case FINISHED:
	/* At the very end of compilation we have to do all the work up
	   to expansion.  */
	node = cgraph_node::create (fndecl);
	if (lowered)
	  node->lowered = true;
	node->definition = true;
	node->analyze ();
	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	gimple_register_cfg_hooks ();
	bitmap_obstack_initialize (NULL);
	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	  g->get_passes ()->execute_early_local_passes ();
	bitmap_obstack_release (NULL);
	pop_cfun ();
	node->expand ();
	break;

      default:
	gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
565 
/* Analyze the function scheduled to be output.  Depending on the kind of
   node this means expanding a thunk, resolving an alias, generating a
   dispatcher body, or gimplifying and lowering an ordinary function.  */
void
cgraph_node::analyze (void)
{
  tree decl = this->decl;
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (thunk.thunk_p)
    {
      cgraph_node *t = cgraph_node::get (thunk.alias);

      create_edge (t, NULL, 0, CGRAPH_FREQ_BASE);
      callees->can_throw_external = !TREE_NOTHROW (t->decl);
      /* Target code in expand_thunk may need the thunk's target
	 to be analyzed, so recurse here.  */
      if (!t->analyzed)
	t->analyze ();
      if (t->alias)
	{
	  t = t->get_alias_target ();
	  if (!t->analyzed)
	    t->analyze ();
	}
      /* Either way the thunk's alias link is dropped; on failure of
	 expand_thunk we are done with this node.  */
      if (!expand_thunk (false, false))
	{
	  thunk.alias = NULL;
	  return;
	}
      thunk.alias = NULL;
    }
  if (alias)
    resolve_alias (cgraph_node::get (alias_target), transparent_alias);
  else if (dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      cgraph_function_version_info *dispatcher_version_info
	= function_version ();
      if (dispatcher_version_info != NULL
          && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (this);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      /* An ordinary function: gimplify and lower its body.  */
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);

      /* Lower the function.  */
      if (!lowered)
	{
	  if (nested)
	    lower_nested_functions (decl);
	  gcc_assert (!nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  lowered = true;
	}

      pop_cfun ();
    }
  analyzed = true;

  input_location = saved_loc;
}
650 
651 /* C++ frontend produce same body aliases all over the place, even before PCH
652    gets streamed out. It relies on us linking the aliases with their function
653    in order to do the fixups, but ipa-ref is not PCH safe.  Consequentely we
654    first produce aliases without links, but once C++ FE is sure he won't sream
655    PCH we build the links via this function.  */
656 
657 void
process_same_body_aliases(void)658 symbol_table::process_same_body_aliases (void)
659 {
660   symtab_node *node;
661   FOR_EACH_SYMBOL (node)
662     if (node->cpp_implicit_alias && !node->analyzed)
663       node->resolve_alias
664 	(TREE_CODE (node->alias_target) == VAR_DECL
665 	 ? (symtab_node *)varpool_node::get_create (node->alias_target)
666 	 : (symtab_node *)cgraph_node::get_create (node->alias_target));
667   cpp_implicit_aliases_done = true;
668 }
669 
670 /* Process attributes common for vars and functions.  */
671 
672 static void
process_common_attributes(symtab_node * node,tree decl)673 process_common_attributes (symtab_node *node, tree decl)
674 {
675   tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
676 
677   if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
678     {
679       warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
680 		  "%<weakref%> attribute should be accompanied with"
681 		  " an %<alias%> attribute");
682       DECL_WEAK (decl) = 0;
683       DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
684 						 DECL_ATTRIBUTES (decl));
685     }
686 
687   if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
688     node->no_reorder = 1;
689 }
690 
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  Only nodes added since FIRST and FIRST_VAR are
   processed, so repeated calls handle just the new symbols.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
                                          varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  /* First walk functions added since FIRST.  */
  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* "weakref" on an already defined function makes no sense.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (node, decl);
    }
  /* Then walk variables added since FIRST_VAR.  */
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* "weakref" on an initialized variable makes no sense.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						      DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
785 
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct the
   middle end to output the variable to asm file, if needed or externally
   visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing twice is a no-op.  */
  if (node->definition)
    return;
  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || node->no_reorder
      || ((!flag_toplevel_reorder
          && !DECL_COMDAT (node->decl)
	   && !DECL_ARTIFICIAL (node->decl))))
    node->force_output = true;

  /* While building the symbol table, queue the variable if it is already
     known to be needed or referred to.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (!flag_toplevel_reorder
	&& symtab->state == EXPANSION))
    node->assemble_decl ();

  /* Register the initializer for pointer bounds checking (mpx).  */
  if (DECL_INITIAL (decl))
    chkp_register_var_initializer (decl);
}
827 
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* Only walk each target list once; the cache token identifies it.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		   == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  /* No target at all means the call is unreachable; redirect it
	     to __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
			(builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
          if (dump_enabled_p ())
            {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
	  if (!targets.length ())
	    gimple_call_set_with_bounds (edge->call_stmt, false);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
914 
/* Issue appropriate warnings for the global declaration DECL of SNODE.  */
916 
static void
check_global_declaration (symtab_node *snode)
{
  const char *decl_file;
  tree decl = snode->decl;

  /* Warn about any function declared static but not defined.  We don't
     warn about variables, because many programs have static variables
     that exist only to get some text into the object file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && DECL_INITIAL (decl) == 0
      && DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! TREE_NO_WARNING (decl)
      && ! TREE_PUBLIC (decl)
      && (warn_unused_function
	  || snode->referred_to_p (/*include_self=*/false)))
    {
      /* A referenced-but-undefined static function is an error (pedwarn);
	 an unreferenced one only triggers -Wunused-function.  */
      if (snode->referred_to_p (/*include_self=*/false))
	pedwarn (input_location, 0, "%q+F used but never defined", decl);
      else
	warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
      /* This symbol is effectively an "extern" declaration now.  */
      TREE_PUBLIC (decl) = 1;
    }

  /* Warn about static fns or vars defined but not used.  */
  if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
       /* For variables: -Wunused-variable covers non-readonly ones;
	  -Wunused-const-variable covers readonly ones, restricted at
	  level 1 to declarations from the main input file.  */
       || (((warn_unused_variable && ! TREE_READONLY (decl))
	    || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
		&& (warn_unused_const_variable == 2
		    || (main_input_filename != NULL
			&& (decl_file = DECL_SOURCE_FILE (decl)) != NULL
			&& filename_cmp (main_input_filename,
					 decl_file) == 0))))
	   && TREE_CODE (decl) == VAR_DECL))
      && ! DECL_IN_SYSTEM_HEADER (decl)
      && ! snode->referred_to_p (/*include_self=*/false)
      /* This TREE_USED check is needed in addition to referred_to_p
	 above, because the `__unused__' attribute is not being
	 considered for referred_to_p.  */
      && ! TREE_USED (decl)
      /* The TREE_USED bit for file-scope decls is kept in the identifier,
	 to handle multiple external decls in different scopes.  */
      && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
      && ! DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! DECL_ABSTRACT_ORIGIN (decl)
      && ! TREE_PUBLIC (decl)
      /* A volatile variable might be used in some non-obvious way.  */
      && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
      /* Global register variables must be declared to reserve them.  */
      && ! (TREE_CODE (decl) == VAR_DECL && DECL_REGISTER (decl))
      /* Global ctors and dtors are called by the runtime.  */
      && (TREE_CODE (decl) != FUNCTION_DECL
	  || (!DECL_STATIC_CONSTRUCTOR (decl)
	      && !DECL_STATIC_DESTRUCTOR (decl)))
      /* Otherwise, ask the language.  */
      && lang_hooks.decls.warn_unused_global (decl))
    warning_at (DECL_SOURCE_LOCATION (decl),
		(TREE_CODE (decl) == FUNCTION_DECL)
		? OPT_Wunused_function
		: (TREE_READONLY (decl)
		   ? OPT_Wunused_const_variable_
		   : OPT_Wunused_variable),
		"%qD defined but not used", decl);
}
984 
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred by them.  */
987 static cgraph_node *first_analyzed;
988 static varpool_node *first_analyzed_var;
989 
990 /* FIRST_TIME is set to TRUE for the first time we are called for a
991    translation unit from finalize_compilation_unit() or false
992    otherwise.  */
993 
static void
analyze_functions (bool first_time)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  cgraph_node *first_handled = first_analyzed;
  varpool_node *first_handled_var = first_analyzed_var;
  /* Cache of polymorphic-call target lists already walked; shared by all
     calls to walk_polymorphic_call_targets below.  */
  hash_set<void *> reachable_call_targets;

  symtab_node *node;
  symtab_node *next;
  int i;
  ipa_ref *ref;
  bool changed = true;
  location_t saved_loc = input_location;

  bitmap_obstack_initialize (NULL);
  symtab->state = CONSTRUCTION;
  input_location = UNKNOWN_LOCATION;

  /* Ugly, but the fixup cannot happen at the time the same-body alias is
     created; the C++ FE is confused about the COMDAT groups being right.  */
  if (symtab->cpp_implicit_aliases_done)
    FOR_EACH_SYMBOL (node)
      if (node->cpp_implicit_alias)
	  node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
  build_type_inheritance_graph ();

  /* Analysis adds static variables that in turn adds references to new
     functions.  So we need to iterate the process until it stabilizes.  */
  while (changed)
    {
      changed = false;
      process_function_and_variable_attributes (first_analyzed,
						first_analyzed_var);

      /* First identify the trivially needed symbols.  */
      for (node = symtab->first_symbol ();
	   node != first_analyzed
	   && node != first_analyzed_var; node = node->next)
	{
	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
	  node->get_comdat_group_id ();
	  if (node->needed_p ())
	    {
	      enqueue_node (node);
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "Trivially needed symbols:");
	      changed = true;
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, " %s", node->asm_name ());
	      /* NOTE(review): CHANGED was set to true just above, so this
		 condition is never true; the header line is instead
		 terminated by the fprintf after the loop.  Dead code.  */
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "\n");
	    }
	  if (node == first_analyzed
	      || node == first_analyzed_var)
	    break;
	}
      symtab->process_new_functions ();
      first_analyzed_var = symtab->first_variable ();
      first_analyzed = symtab->first_function ();

      if (changed && symtab->dump_file)
	fprintf (symtab->dump_file, "\n");

      /* Lower representation, build callgraph edges and references for all trivially
         needed symbols and all symbols referred by them.  */
      while (queued_nodes != &symtab_terminator)
	{
	  changed = true;
	  /* Pop the head of the queue; AUX links queued nodes.  */
	  node = queued_nodes;
	  queued_nodes = (symtab_node *)queued_nodes->aux;
	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
	  if (cnode && cnode->definition)
	    {
	      cgraph_edge *edge;
	      tree decl = cnode->decl;

	      /* ??? It is possible to create extern inline function
	      and later using weak alias attribute to kill its body.
	      See gcc.c-torture/compile/20011119-1.c  */
	      if (!DECL_STRUCT_FUNCTION (decl)
		  && !cnode->alias
		  && !cnode->thunk.thunk_p
		  && !cnode->dispatcher_function)
		{
		  cnode->reset ();
		  cnode->local.redefined_extern_inline = true;
		  continue;
		}

	      if (!cnode->analyzed)
		cnode->analyze ();

	      /* Enqueue callees that must be analyzed in this unit.  */
	      for (edge = cnode->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition
		    && (!DECL_EXTERNAL (edge->callee->decl)
			/* When not optimizing, do not try to analyze extern
			   inline functions.  Doing so is pointless.  */
			|| opt_for_fn (edge->callee->decl, optimize)
			/* Weakrefs needs to be preserved.  */
			|| edge->callee->alias
			/* always_inline functions are inlined even at -O0.  */
		        || lookup_attribute
				 ("always_inline",
			          DECL_ATTRIBUTES (edge->callee->decl))
			/* Multiversioned functions needs the dispatcher to
			   be produced locally even for extern functions.  */
			|| edge->callee->function_version ()))
		   enqueue_node (edge->callee);
	      /* Walk polymorphic indirect calls to discover possible
		 targets and perform trivial devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  cgraph_edge *next;

		  for (edge = cnode->indirect_calls; edge; edge = next)
		    {
		      next = edge->next_callee;
		      if (edge->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       edge);
		    }
		}

	      /* If decl is a clone of an abstract function,
	      mark that abstract function so that we don't release its body.
	      The DECL_INITIAL() of that abstract function declaration
	      will be later needed to output debug info.  */
	      if (DECL_ABSTRACT_ORIGIN (decl))
		{
		  cgraph_node *origin_node
		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
		  origin_node->used_as_abstract_origin = true;
		}
	    }
	  else
	    {
	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
	      if (vnode && vnode->definition && !vnode->analyzed)
		vnode->analyze ();
	    }

	  /* Members of a COMDAT group are kept (or removed) together.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ())
		  enqueue_node (next);
	    }
	  /* Enqueue referenced symbols that must be analyzed here.  */
	  for (i = 0; node->iterate_reference (i, ref); i++)
	    if (ref->referred->definition
		&& (!DECL_EXTERNAL (ref->referred->decl)
		    || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
			    && opt_for_fn (ref->referred->decl, optimize))
		    || node->alias
		    || ref->referred->alias)))
	      enqueue_node (ref->referred);
	  symtab->process_new_functions ();
	}
    }
  update_type_inheritance_graph ();

  /* Collect entry points to the unit.  */
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nInitial ");
      symtab_node::dump_table (symtab->dump_file);
    }

  if (first_time)
    {
      symtab_node *snode;
      FOR_EACH_SYMBOL (snode)
	check_global_declaration (snode);
    }

  if (symtab->dump_file)
    fprintf (symtab->dump_file, "\nRemoving unused symbols:");

  /* Remove symbols that were never reached (AUX unset) and are not
     referred to by anything kept.  */
  for (node = symtab->first_symbol ();
       node != first_handled
       && node != first_handled_var; node = next)
    {
      next = node->next;
      if (!node->aux && !node->referred_to_p ())
	{
	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, " %s", node->name ());

	  /* See if the debugger can use anything before the DECL
	     passes away.  Perhaps it can notice a DECL that is now a
	     constant and can tag the early DIE with an appropriate
	     attribute.

	     Otherwise, this is the last chance the debug_hooks have
	     at looking at optimized away DECLs, since
	     late_global_decl will subsequently be called from the
	     contents of the now pruned symbol table.  */
	  if (VAR_P (node->decl)
	      && !decl_function_context (node->decl))
	    {
	      /* We are reclaiming totally unreachable code and variables
	         so they effectively appear as readonly.  Show that to
	         the debug machinery.  */
	      TREE_READONLY (node->decl) = 1;
	      node->definition = false;
	      (*debug_hooks->late_global_decl) (node->decl);
	    }

	  node->remove ();
	  continue;
	}
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  tree decl = node->decl;

	  /* Release bodies of functions whose definition was lost.  */
	  if (cnode->definition && !gimple_has_body_p (decl)
	      && !cnode->alias
	      && !cnode->thunk.thunk_p)
	    cnode->reset ();

	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
		      || cnode->alias
		      || gimple_has_body_p (decl));
	  gcc_assert (cnode->analyzed == cnode->definition);
	}
      /* Clear the queue marker for the next round.  */
      node->aux = NULL;
    }
  /* Clear AUX on the remainder of the list too.  */
  for (;node; node = node->next)
    node->aux = NULL;
  first_analyzed = symtab->first_function ();
  first_analyzed_var = symtab->first_variable ();
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nReclaimed ");
      symtab_node::dump_table (symtab->dump_file);
    }
  bitmap_obstack_release (NULL);
  ggc_collect ();
  /* Initialize assembler name hash, in particular we want to trigger C++
     mangling and same body alias creation before we free DECL_ARGUMENTS
     used by it.  */
  if (!seen_error ())
    symtab->symtab_initialize_asm_name_hash ();

  input_location = saved_loc;
}
1245 
1246 /* Translate the ugly representation of aliases as alias pairs into nice
1247    representation in callgraph.  We don't handle all cases yet,
1248    unfortunately.  */
1249 
static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* I is only advanced implicitly: every handled pair is removed via
     unordered_remove, which shifts a new element into slot I.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	      node->transparent_alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  /* Non-weakref alias to a symbol that does not exist at all.  */
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      /* Function aliased to function: record it in the callgraph.  */
      if (TREE_CODE (p->decl) == FUNCTION_DECL
          && target_node && is_a <cgraph_node *> (target_node))
	{
	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  /* The alias replaces any previously parsed definition.  */
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      /* Variable aliased to variable: record it in the varpool.  */
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  /* Mixed function/variable alias is not supported.  */
	  error ("%q+D alias in between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
1325 
1326 
1327 /* Figure out what functions we want to assemble.  */
1328 
static void
mark_functions_to_output (void)
{
  bool check_same_comdat_groups = false;
  cgraph_node *node;

  /* Sanity check: no node may carry a stale PROCESS flag on entry.  */
  if (flag_checking)
    FOR_EACH_FUNCTION (node)
      gcc_assert (!node->process);

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* PROCESS may already be set via a COMDAT sibling handled below.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emitting one member of a COMDAT group forces emitting the
	     other non-thunk, non-alias members too.  */
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
	  /* Verified later in a second pass, once all PROCESS flags
	     are settled.  */
	  if (flag_checking)
	    check_same_comdat_groups = true;
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
	  if (flag_checking
	      && !node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
  /* Second checking pass: a COMDAT member left without PROCESS must not
     still own a reclaimable body.  */
  if (flag_checking && check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
}
1422 
1423 /* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
1424    in lowered gimple form.  IN_SSA is true if the gimple is in SSA.
1425 
1426    Set current_function_decl and cfun to newly constructed empty function body.
1427    return basic block in the function body.  */
1428 
basic_block
init_lowered_empty_function (tree decl, bool in_ssa, gcov_type count)
{
  basic_block bb;
  edge e;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();

  /* Optionally set up SSA data structures so the body can be built
     directly in SSA form.  */
  if (in_ssa)
    {
      init_tree_ssa (cfun);
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Mark the function as already lowered GIMPLE with CFG and loops.  */
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly.  */
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
  bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  bb->count = count;
  bb->frequency = BB_FREQ_MAX;
  /* ENTRY -> bb -> EXIT, both edges taken unconditionally.  */
  e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  e->count = count;
  e->probability = REG_BR_PROB_BASE;
  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  e->count = count;
  e->probability = REG_BR_PROB_BASE;
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  return bb;
}
1476 
1477 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1478    offset indicated by VIRTUAL_OFFSET, if that is
1479    non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1480    zero for a result adjusting thunk.  */
1481 
static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gassign *stmt;
  tree ret;

  /* For a this-adjusting thunk the fixed offset is applied before the
     virtual lookup (it adjusts `this' itself).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily build the type used to access vtable entries.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				     "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				     "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* For a result-adjusting thunk the fixed offset is applied after the
     virtual lookup instead.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1580 
1581 /* Expand thunk NODE to gimple if possible.
1582    When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1583    no assembler is produced.
1584    When OUTPUT_ASM_THUNK is true, also produce assembler for
1585    thunks that are not lowered.  */
1586 
1587 bool
expand_thunk(bool output_asm_thunks,bool force_gimple_thunk)1588 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1589 {
1590   bool this_adjusting = thunk.this_adjusting;
1591   HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1592   HOST_WIDE_INT virtual_value = thunk.virtual_value;
1593   tree virtual_offset = NULL;
1594   tree alias = callees->callee->decl;
1595   tree thunk_fndecl = decl;
1596   tree a;
1597 
1598   /* Instrumentation thunk is the same function with
1599      a different signature.  Never need to expand it.  */
1600   if (thunk.add_pointer_bounds_args)
1601     return false;
1602 
1603   if (!force_gimple_thunk && this_adjusting
1604       && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1605 					      virtual_value, alias))
1606     {
1607       const char *fnname;
1608       tree fn_block;
1609       tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1610 
1611       if (!output_asm_thunks)
1612 	{
1613 	  analyzed = true;
1614 	  return false;
1615 	}
1616 
1617       if (in_lto_p)
1618 	get_untransformed_body ();
1619       a = DECL_ARGUMENTS (thunk_fndecl);
1620 
1621       current_function_decl = thunk_fndecl;
1622 
1623       /* Ensure thunks are emitted in their correct sections.  */
1624       resolve_unique_section (thunk_fndecl, 0,
1625 			      flag_function_sections);
1626 
1627       DECL_RESULT (thunk_fndecl)
1628 	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1629 		      RESULT_DECL, 0, restype);
1630       DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1631       fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1632 
1633       /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1634 	 create one.  */
1635       fn_block = make_node (BLOCK);
1636       BLOCK_VARS (fn_block) = a;
1637       DECL_INITIAL (thunk_fndecl) = fn_block;
1638       allocate_struct_function (thunk_fndecl, false);
1639       init_function_start (thunk_fndecl);
1640       cfun->is_thunk = 1;
1641       insn_locations_init ();
1642       set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1643       prologue_location = curr_insn_location ();
1644       assemble_start_function (thunk_fndecl, fnname);
1645 
1646       targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1647 				       fixed_offset, virtual_value, alias);
1648 
1649       assemble_end_function (thunk_fndecl, fnname);
1650       insn_locations_finalize ();
1651       init_insn_lengths ();
1652       free_after_compilation (cfun);
1653       TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1654       thunk.thunk_p = false;
1655       analyzed = false;
1656     }
1657   else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1658     {
1659       error ("generic thunk code fails for method %qD which uses %<...%>",
1660 	     thunk_fndecl);
1661       TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1662       analyzed = true;
1663       return false;
1664     }
1665   else
1666     {
1667       tree restype;
1668       basic_block bb, then_bb, else_bb, return_bb;
1669       gimple_stmt_iterator bsi;
1670       int nargs = 0;
1671       tree arg;
1672       int i;
1673       tree resdecl;
1674       tree restmp = NULL;
1675       tree resbnd = NULL;
1676 
1677       gcall *call;
1678       greturn *ret;
1679       bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1680 
1681       /* We may be called from expand_thunk that releses body except for
1682 	 DECL_ARGUMENTS.  In this case force_gimple_thunk is true.  */
1683       if (in_lto_p && !force_gimple_thunk)
1684 	get_untransformed_body ();
1685       a = DECL_ARGUMENTS (thunk_fndecl);
1686 
1687       current_function_decl = thunk_fndecl;
1688 
1689       /* Ensure thunks are emitted in their correct sections.  */
1690       resolve_unique_section (thunk_fndecl, 0,
1691 			      flag_function_sections);
1692 
1693       DECL_IGNORED_P (thunk_fndecl) = 1;
1694       bitmap_obstack_initialize (NULL);
1695 
1696       if (thunk.virtual_offset_p)
1697         virtual_offset = size_int (virtual_value);
1698 
1699       /* Build the return declaration for the function.  */
1700       restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1701       if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1702 	{
1703 	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1704 	  DECL_ARTIFICIAL (resdecl) = 1;
1705 	  DECL_IGNORED_P (resdecl) = 1;
1706 	  DECL_RESULT (thunk_fndecl) = resdecl;
1707           DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1708 	}
1709       else
1710 	resdecl = DECL_RESULT (thunk_fndecl);
1711 
1712       bb = then_bb = else_bb = return_bb
1713 	= init_lowered_empty_function (thunk_fndecl, true, count);
1714 
1715       bsi = gsi_start_bb (bb);
1716 
1717       /* Build call to the function being thunked.  */
1718       if (!VOID_TYPE_P (restype)
1719 	  && (!alias_is_noreturn
1720 	      || TREE_ADDRESSABLE (restype)
1721 	      || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
1722 	{
1723 	  if (DECL_BY_REFERENCE (resdecl))
1724 	    {
1725 	      restmp = gimple_fold_indirect_ref (resdecl);
1726 	      if (!restmp)
1727 		restmp = build2 (MEM_REF,
1728 				 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1729 				 resdecl,
1730 				 build_int_cst (TREE_TYPE
1731 				   (DECL_RESULT (alias)), 0));
1732 	    }
1733 	  else if (!is_gimple_reg_type (restype))
1734 	    {
1735 	      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1736 		{
1737 		  restmp = resdecl;
1738 
1739 		  if (TREE_CODE (restmp) == VAR_DECL)
1740 		    add_local_decl (cfun, restmp);
1741 		  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1742 		}
1743 	      else
1744 		restmp = create_tmp_var (restype, "retval");
1745 	    }
1746 	  else
1747 	    restmp = create_tmp_reg (restype, "retval");
1748 	}
1749 
1750       for (arg = a; arg; arg = DECL_CHAIN (arg))
1751         nargs++;
1752       auto_vec<tree> vargs (nargs);
1753       i = 0;
1754       arg = a;
1755       if (this_adjusting)
1756 	{
1757 	  vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1758 					  virtual_offset));
1759 	  arg = DECL_CHAIN (a);
1760 	  i = 1;
1761 	}
1762 
1763       if (nargs)
1764 	for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1765 	  {
1766 	    tree tmp = arg;
1767 	    if (VECTOR_TYPE_P (TREE_TYPE (arg))
1768 		|| TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
1769 	      DECL_GIMPLE_REG_P (arg) = 1;
1770 
1771 	    if (!is_gimple_val (arg))
1772 	      {
1773 		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1774 				      (TREE_TYPE (arg)), "arg");
1775 		gimple *stmt = gimple_build_assign (tmp, arg);
1776 		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1777 	      }
1778 	    vargs.quick_push (tmp);
1779 	  }
1780       call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1781       callees->call_stmt = call;
1782       gimple_call_set_from_thunk (call, true);
1783       gimple_call_set_with_bounds (call, instrumentation_clone);
1784 
      /* Return slot optimization is always possible and in fact required to
         return values with DECL_BY_REFERENCE.  */
1787       if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
1788 	  && (!is_gimple_reg_type (TREE_TYPE (resdecl))
1789 	      || DECL_BY_REFERENCE (resdecl)))
1790         gimple_call_set_return_slot_opt (call, true);
1791 
1792       if (restmp)
1793 	{
1794           gimple_call_set_lhs (call, restmp);
1795 	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1796 						 TREE_TYPE (TREE_TYPE (alias))));
1797 	}
1798       gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1799       if (!alias_is_noreturn)
1800 	{
1801 	  if (instrumentation_clone
1802 	      && !DECL_BY_REFERENCE (resdecl)
1803 	      && restmp
1804 	      && BOUNDED_P (restmp))
1805 	    {
1806 	      resbnd = chkp_insert_retbnd_call (NULL, restmp, &bsi);
1807 	      create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi))),
1808 			   as_a <gcall *> (gsi_stmt (bsi)),
1809 			   callees->count, callees->frequency);
1810 	    }
1811 
1812 	  if (restmp && !this_adjusting
1813 	      && (fixed_offset || virtual_offset))
1814 	    {
1815 	      tree true_label = NULL_TREE;
1816 
1817 	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1818 		{
1819 		  gimple *stmt;
1820 		  edge e;
1821 		  /* If the return type is a pointer, we need to
1822 		     protect against NULL.  We know there will be an
1823 		     adjustment, because that's why we're emitting a
1824 		     thunk.  */
1825 		  then_bb = create_basic_block (NULL, bb);
1826 		  then_bb->count = count - count / 16;
1827 		  then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
1828 		  return_bb = create_basic_block (NULL, then_bb);
1829 		  return_bb->count = count;
1830 		  return_bb->frequency = BB_FREQ_MAX;
1831 		  else_bb = create_basic_block (NULL, else_bb);
1832 		  then_bb->count = count / 16;
1833 		  then_bb->frequency = BB_FREQ_MAX / 16;
1834 		  add_bb_to_loop (then_bb, bb->loop_father);
1835 		  add_bb_to_loop (return_bb, bb->loop_father);
1836 		  add_bb_to_loop (else_bb, bb->loop_father);
1837 		  remove_edge (single_succ_edge (bb));
1838 		  true_label = gimple_block_label (then_bb);
1839 		  stmt = gimple_build_cond (NE_EXPR, restmp,
1840 					    build_zero_cst (TREE_TYPE (restmp)),
1841 					    NULL_TREE, NULL_TREE);
1842 		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1843 		  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1844 		  e->probability = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 16;
1845 		  e->count = count - count / 16;
1846 		  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1847 		  e->probability = REG_BR_PROB_BASE / 16;
1848 		  e->count = count / 16;
1849 		  e = make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1850 		  e->probability = REG_BR_PROB_BASE;
1851 		  e->count = count;
1852 		  e = make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1853 		  e->probability = REG_BR_PROB_BASE;
1854 		  e->count = count - count / 16;
1855 		  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1856 		  e->probability = REG_BR_PROB_BASE;
1857 		  e->count = count / 16;
1858 		  bsi = gsi_last_bb (then_bb);
1859 		}
1860 
1861 	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1862 				     fixed_offset, virtual_offset);
1863 	      if (true_label)
1864 		{
1865 		  gimple *stmt;
1866 		  bsi = gsi_last_bb (else_bb);
1867 		  stmt = gimple_build_assign (restmp,
1868 					      build_zero_cst (TREE_TYPE (restmp)));
1869 		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1870 		  bsi = gsi_last_bb (return_bb);
1871 		}
1872 	    }
1873 	  else
1874 	    gimple_call_set_tail (call, true);
1875 
1876 	  /* Build return value.  */
1877 	  if (!DECL_BY_REFERENCE (resdecl))
1878 	    ret = gimple_build_return (restmp);
1879 	  else
1880 	    ret = gimple_build_return (resdecl);
1881 	  gimple_return_set_retbnd (ret, resbnd);
1882 
1883 	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1884 	}
1885       else
1886 	{
1887 	  gimple_call_set_tail (call, true);
1888 	  remove_edge (single_succ_edge (bb));
1889 	}
1890 
1891       cfun->gimple_df->in_ssa_p = true;
1892       profile_status_for_fn (cfun)
1893         = count ? PROFILE_READ : PROFILE_GUESSED;
1894       /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
1895       TREE_ASM_WRITTEN (thunk_fndecl) = false;
1896       delete_unreachable_blocks ();
1897       update_ssa (TODO_update_ssa);
1898       checking_verify_flow_info ();
1899       free_dominance_info (CDI_DOMINATORS);
1900 
1901       /* Since we want to emit the thunk, we explicitly mark its name as
1902 	 referenced.  */
1903       thunk.thunk_p = false;
1904       lowered = true;
1905       bitmap_obstack_release (NULL);
1906     }
1907   current_function_decl = NULL;
1908   set_cfun (NULL);
1909   return true;
1910 }
1911 
/* Assemble thunks and aliases associated to node.

   Thunks appear in the symbol table as artificial callers of NODE, so
   they are found by scanning the caller list; aliases are found via the
   reference list.  Both are emitted recursively, since a thunk or alias
   may itself have thunks/aliases attached.  */

void
cgraph_node::assemble_thunks_and_aliases (void)
{
  cgraph_edge *e;
  ipa_ref *ref;

  /* E is advanced before the thunk is expanded — presumably because
     expansion can modify the caller list; confirm before reordering.  */
  for (e = callers; e;)
    if (e->caller->thunk.thunk_p
	&& !e->caller->thunk.add_pointer_bounds_args)
      {
	cgraph_node *thunk = e->caller;

	e = e->next_caller;
	thunk->expand_thunk (true, false);
	thunk->assemble_thunks_and_aliases ();
      }
    else
      e = e->next_caller;

  FOR_EACH_ALIAS (this, ref)
    {
      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      if (!alias->transparent_alias)
	{
	  /* Save and restore TREE_ASM_WRITTEN around the emission below.  */
	  bool saved_written = TREE_ASM_WRITTEN (decl);

	  /* Force assemble_alias to really output the alias this time instead
	     of buffering it in same alias pairs.  */
	  TREE_ASM_WRITTEN (decl) = 1;
	  do_assemble_alias (alias->decl,
			     DECL_ASSEMBLER_NAME (decl));
	  /* The alias may itself carry thunks and aliases.  */
	  alias->assemble_thunks_and_aliases ();
	  TREE_ASM_WRITTEN (decl) = saved_written;
	}
    }
}
1950 
/* Expand function specified by node.

   Drives the whole per-function compilation pipeline: fetches the GIMPLE
   body, applies queued IPA transforms, runs all tree and RTL passes, and
   finally releases the body and call edges.  Statement order here is
   significant (cfun push/pop, obstack init/release, timevar pairs).  */

void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!global.inlined_to);

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  gcc_assert (DECL_STRUCT_FUNCTION (decl));
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply any IPA transform summaries queued for this function before
     the regular pass list runs.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Report the exact size when it round-trips through unsigned
	     int; otherwise just say it exceeds the threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  if (cfun)
    pop_cfun ();

  /* It would make a lot more sense to output thunks before function body to get more
     forward and fewer backward jumps.  This however would need solving problem
     with comdats.  See PR48668.  Also aliases must come after function itself to
     make one pass assemblers, like one on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be moved before function IFF they are not in comdat
     groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
2064 
2065 /* Node comparer that is responsible for the order that corresponds
2066    to time when a function was launched for the first time.  */
2067 
2068 static int
node_cmp(const void * pa,const void * pb)2069 node_cmp (const void *pa, const void *pb)
2070 {
2071   const cgraph_node *a = *(const cgraph_node * const *) pa;
2072   const cgraph_node *b = *(const cgraph_node * const *) pb;
2073 
2074   /* Functions with time profile must be before these without profile.  */
2075   if (!a->tp_first_run || !b->tp_first_run)
2076     return a->tp_first_run - b->tp_first_run;
2077 
2078   return a->tp_first_run != b->tp_first_run
2079 	 ? b->tp_first_run - a->tp_first_run
2080 	 : b->order - a->order;
2081 }
2082 
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
expand_all_functions (void)
{
  cgraph_node *node;
  cgraph_node **order = XCNEWVEC (cgraph_node *,
					 symtab->cgraph_count);
  /* Counters used only for the dump-file statistics below.  */
  unsigned int expanded_func_count = 0, profiled_func_count = 0;
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = ipa_reverse_postorder (order);
  gcc_assert (order_pos == symtab->cgraph_count);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  /* With -fprofile-reorder-functions, override reverse postorder with
     the time-profile order defined by node_cmp.  */
  if (flag_profile_reorder_functions)
    qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);

  /* Walk the array backwards so that, per the comment above, callees
     tend to be assembled before their callers.  */
  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];

      if (node->process)
	{
	  expanded_func_count++;
	  if(node->tp_first_run)
	    profiled_func_count++;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file,
		     "Time profile order in expand_all_functions:%s:%d\n",
		     node->asm_name (), node->tp_first_run);
	  node->process = 0;
	  node->expand ();
	}
    }

    if (dump_file)
      fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
               main_input_filename, profiled_func_count, expanded_func_count);

  if (symtab->dump_file && flag_profile_reorder_functions)
    fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
             profiled_func_count, expanded_func_count);

  /* Expansion may have queued new functions (e.g. via clones); process
     them before tearing down.  */
  symtab->process_new_functions ();
  free_gimplify_stack ();

  free (order);
}
2147 
/* This is used to sort the node types by the cgraph order number.
   Discriminant for the union in cgraph_order_sort below.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not claimed by any symbol.  */
  ORDER_FUNCTION,	/* Entry holds a cgraph_node (u.f).  */
  ORDER_VAR,		/* Entry holds a varpool_node (u.v).  */
  ORDER_ASM		/* Entry holds an asm_node (u.a).  */
};
2157 
/* One element of the output vector built by output_in_order: a tagged
   union of the three symbol kinds, selected by KIND.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;	/* Which union member is valid.  */
  union
  {
    cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
2168 
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.
   When NO_REORDER is true only do this for symbols marked no reorder. */

static void
output_in_order (bool no_reorder)
{
  int max;
  cgraph_order_sort *nodes;
  int i;
  cgraph_node *pf;
  varpool_node *pv;
  asm_node *pa;
  /* NODES is indexed by the symbol-table order number, so each symbol
     occupies a unique slot; the asserts below verify this.  */
  max = symtab->order;
  nodes = XCNEWVEC (cgraph_order_sort, max);

  /* Thunks and aliases are emitted together with the functions they
     belong to, so only plain definitions get a slot.  */
  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  if (no_reorder && !pf->no_reorder)
	    continue;
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* External variables have no body to assemble.  */
  FOR_EACH_DEFINED_VARIABLE (pv)
    if (!DECL_EXTERNAL (pv->decl))
      {
	if (no_reorder && !pv->no_reorder)
	    continue;
	i = pv->order;
	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	nodes[i].kind = ORDER_VAR;
	nodes[i].u.v = pv;
      }

  for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      nodes[i].u.v->finalize_named_section_flags ();

  /* Second walk: emit everything in source (order-number) order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  nodes[i].u.f->expand ();
	  break;

	case ORDER_VAR:
#ifdef ACCEL_COMPILER
	  /* Do not assemble "omp declare target link" vars.  */
	  if (DECL_HAS_VALUE_EXPR_P (nodes[i].u.v->decl)
	      && lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (nodes[i].u.v->decl)))
	    break;
#endif
	  nodes[i].u.v->assemble_decl ();
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  /* Slot never claimed by any symbol; nothing to emit.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  symtab->clear_asm_symbols ();

  free (nodes);
}
2262 
/* Run the interprocedural (IPA) pass pipeline: small IPA passes,
   summary generation, LTO/offload summary streaming, and finally the
   regular IPA passes.  The !in_lto_p branches distinguish the normal
   compile step from an LTO link-time (lto1) invocation.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes work on the whole program; make sure no per-function
     context is active.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      /* NOTE(review): this returns without releasing the bitmap obstack
	 or invoking PLUGIN_ALL_IPA_PASSES_END; presumably harmless since
	 errors abort compilation — confirm before relying on it.  */
      if (seen_error ())
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterate.  */
  symtab->remove_unreachable_nodes (symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_start ();

  if (!in_lto_p)
    {
      /* Stream the offload summaries first, under their own section-name
	 prefix, then the regular LTO summaries.  */
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = true;
	  ipa_write_summaries ();
	  lto_stream_offload_p = false;
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = false;
	  ipa_write_summaries ();
	}
    }

  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes except when only streaming LTO
     (a non-fat object in the compile step of a -flto build).  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2335 
2336 
2337 /* Return string alias is alias of.  */
2338 
2339 static tree
get_alias_symbol(tree decl)2340 get_alias_symbol (tree decl)
2341 {
2342   tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2343   return get_identifier (TREE_STRING_POINTER
2344 			  (TREE_VALUE (TREE_VALUE (alias))));
2345 }
2346 
2347 
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */

void
symbol_table::output_weakrefs (void)
{
  symtab_node *node;
  cgraph_node *cnode;
  /* Emit every weakref alias not already written; skip a node whose
     instrumented version (chkp) was already emitted.  Note CNODE is
     assigned inside the condition by the dyn_cast.  */
  FOR_EACH_SYMBOL (node)
    if (node->alias
        && !TREE_ASM_WRITTEN (node->decl)
	&& (!(cnode = dyn_cast <cgraph_node *> (node))
	    || !cnode->instrumented_version
	    || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in current
	   compilation unit.  It is thus bit hard to work out what we want to
	   alias.
	   When alias target is defined, we need to fetch it from symtab reference,
	   otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
	else
	  {
	    /* Neither an explicit target nor an analyzed alias: should be
	       impossible; the assignment below is dead code kept after the
	       gcc_unreachable.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
        do_assemble_alias (node->decl, target);
      }
}
2385 
/* Perform simple optimizations based on callgraph.

   Master driver for the IPA and expansion phases: runs ipa_passes,
   verifies and dumps the symbol table, then emits everything (either in
   strict source order or via expand_all_functions), and finally checks
   that all function bodies were released.  */

void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

  symtab_node::checking_verify_symtab_nodes ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_offload = 1;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto || flag_generate_offload)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab_node:: dump_table (dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
  symtab_node::checking_verify_symtab_nodes ();

  materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	   = (node->alias_target ? node->alias_target
	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
      }
#endif

  state = EXPANSION;

  if (!flag_toplevel_reorder)
    output_in_order (false);
  else
    {
      /* Output first asm statements and anything ordered. The process
         flag is cleared for these nodes, so we skip them later.  */
      output_in_order (true);
      expand_all_functions ();
      output_variables ();
    }

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab_node::dump_table (dump_file);
    }
  /* The remainder is consistency checking only.  */
  if (!flag_checking)
    return;
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    node->debug ();
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
}
2520 
2521 
/* Analyze the whole compilation unit once it is parsed completely.

   Front-end entry point into the middle end: gimplifies and analyzes
   all finalized functions, emits early debug info, then hands control
   to compile () which drives the pass manager.  */

void
symbol_table::finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions (/*first_time=*/true);

  /* Mark alias targets necessary and emit diagnostics.  Done a second
     time — presumably because analysis can queue new alias pairs;
     confirm before consolidating the two calls.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  analyze_functions (/*first_time=*/false);

  if (!seen_error ())
    {
      /* Emit early debug for reachable functions, and by consequence,
	 locally scoped symbols.  */
      struct cgraph_node *cnode;
      FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
	(*debug_hooks->early_global_decl) (cnode->decl);

      /* Clean up anything that needs cleaning up after initial debug
	 generation.  */
      (*debug_hooks->early_finish) ();
    }

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2580 
/* Reset all state within cgraphunit.c so that we can rerun the compiler
   within the same process.  For use by toplev::finalize.  */

void
cgraphunit_c_finalize (void)
{
  /* All queued functions must have been processed before teardown.  */
  gcc_assert (cgraph_new_nodes.length () == 0);
  cgraph_new_nodes.truncate (0);

  vtable_entry_type = NULL;
  /* Reset the analysis worklist to its empty-queue sentinel.  */
  queued_nodes = &symtab_terminator;

  first_analyzed = NULL;
  first_analyzed_var = NULL;
}
2596 
/* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
   kind of wrapper method.

   Discards this node's body, reuses its arguments, and rebuilds it as a
   thunk whose only effect is to call TARGET.  */

void
cgraph_node::create_wrapper (cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (decl);

  /* Remove the function's body but keep arguments to be reused
     for thunk.  */
  release_body (true);
  reset ();

  DECL_UNINLINABLE (decl) = false;
  DECL_RESULT (decl) = decl_result;
  DECL_INITIAL (decl) = NULL;
  allocate_struct_function (decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  definition = true;

  /* All adjustment fields are zeroed, so this is a pure pass-through
     thunk (no this-pointer or return-value adjustment).  */
  memset (&thunk, 0, sizeof (cgraph_thunk_info));
  thunk.thunk_p = true;
  create_edge (target, NULL, count, CGRAPH_FREQ_BASE);
  callees->can_throw_external = !TREE_NOTHROW (target->decl);

  tree arguments = DECL_ARGUMENTS (decl);

  /* Clear TREE_ADDRESSABLE on the reused arguments — presumably so thunk
     expansion can treat them as plain GIMPLE values; confirm rationale.  */
  while (arguments)
    {
      TREE_ADDRESSABLE (arguments) = false;
      arguments = TREE_CHAIN (arguments);
    }

  expand_thunk (false, true);

  /* Inline summary set-up.  */
  analyze ();
  inline_analyze_function (this);
}
2639 
2640 #include "gt-cgraphunit.h"
2641