1 /* Driver of optimization process
2    Copyright (C) 2003-2019 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 /* This module implements main driver of compilation process.
22 
23    The main purpose of this file is to act as an interface between
24    tree based frontends and the backend.
25 
26    The front-end is supposed to use the following functionality:
27 
28     - finalize_function
29 
30       This function is called once the front end has parsed the whole function
31       body and it is certain that neither the body nor the declaration will change.
32 
33       (There is one exception needed for implementing the GCC extern inline
34 	function extension.)
35 
36     - varpool_finalize_decl
37 
38       This function has the same behavior as the above but is used for static
39       variables.
40 
41     - add_asm_node
42 
43       Inserts a new toplevel ASM statement.
44 
45     - finalize_compilation_unit
46 
47       This function is called once the (source level) compilation unit is
48       finalized and it will no longer change.
49 
50       The symbol table is constructed starting from the trivially needed
51       symbols finalized by the frontend.  Functions are lowered into
52       GIMPLE representation and callgraph/reference lists are constructed.
53       Those are used to discover other necessary functions and variables.
54 
55       At the end the bodies of unreachable functions are removed.
56 
57       The function can be called multiple times when multiple source level
58       compilation units are combined.
59 
60     - compile
61 
62       This passes control to the back-end.  Optimizations are performed and
63       final assembler is generated.  This is done in the following way. Note
64       that with link time optimization the process is split into three
65       stages (compile time, link time analysis and parallel link time as
66       indicated below).
67 
68       Compile time:
69 
70 	1) Inter-procedural optimization.
71 	   (ipa_passes)
72 
73 	   This part is further split into:
74 
75 	   a) early optimizations. These are local passes executed in
76 	      the topological order on the callgraph.
77 
78 	      The purpose of early optimizations is to optimize away simple
79 	      things that may otherwise confuse IP analysis.  Very simple
80 	      propagation across the callgraph is done, e.g. to discover
81 	      functions without side effects, and simple inlining is performed.
82 
83 	   b) early small interprocedural passes.
84 
85 	      Those are interprocedural passes executed only at compilation
86 	      time.  These include, for example, transactional memory lowering,
87 	      unreachable code removal and other simple transformations.
88 
89 	   c) IP analysis stage.  All interprocedural passes do their
90 	      analysis.
91 
92 	      Interprocedural passes differ from small interprocedural
93 	      passes by their ability to operate across the whole program
94 	      at link time.  Their analysis stage is performed early to
95 	      reduce both linking times and link time memory usage by
96 	      not having to represent the whole program in memory.
97 
98 	   d) LTO streaming.  When doing LTO, everything important gets
99 	      streamed into the object file.
100 
101        Compile time and/or link time analysis stage (WPA):
102 
103 	      At link time the units are streamed back and the symbol table
104 	      is merged.  Function bodies are not streamed in and are not
105 	      available.
106 	   e) IP propagation stage.  All IP passes execute their
107 	      IP propagation.  This is done based on the earlier analysis
108 	      without having function bodies at hand.
109 	   f) Ltrans streaming.  When doing WHOPR LTO, the program
110 	      is partitioned and streamed into multiple object files.
111 
112        Compile time and/or parallel linktime stage (ltrans)
113 
114 	      Each of the object files is streamed back and compiled
115 	      separately.  Now the function bodies become available
116 	      again.
117 
118 	 2) Virtual clone materialization
119 	    (cgraph_materialize_clone)
120 
121 	    IP passes can produce copies of existing functions (such
122 	    as versioned clones or inline clones) without actually
123 	    manipulating their bodies by creating virtual clones in
124 	    the callgraph.  At this time the virtual clones are
125 	    turned into real functions.
126 	 3) IP transformation
127 
128 	    All IP passes transform function bodies based on the earlier
129 	    decisions of the IP propagation.
130 
131 	 4) late small IP passes
132 
133 	    Simple IP passes working within a single program partition.
134 
135 	 5) Expansion
136 	    (expand_all_functions)
137 
138 	    At this stage the functions that need to be output into
139 	    assembler are identified and compiled in topological order.
140 	 6) Output of variables and aliases
141 	    Now it is known which variable references were not optimized
142 	    out and thus all variables are output to the file.
143 
144 	    Note that with -fno-toplevel-reorder stages 5 and 6
145 	    are combined together in cgraph_output_in_order.
146 
147    Finally there are functions to manipulate the callgraph from
148    the backend.
149     - cgraph_add_new_function is used to add backend produced
150       functions introduced after the unit is finalized.
151       The functions are enqueued for later processing and inserted
152       into the callgraph with cgraph_process_new_functions.
153 
154     - cgraph_function_versioning
155 
156       produces a copy of a function into a new one (a version)
157       and applies simple transformations.
158 */
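
/* As a rough, illustrative sketch only (the exact spelling of the entry
   points differs between frontends and GCC versions, and FNDECL/VARDECL
   below are just placeholders for the parsed declarations), the frontend
   side of the interface described above typically boils down to:

	// once a function body is complete:
	cgraph_node::finalize_function (fndecl, false);
	// for each file-scope variable:
	varpool_node::finalize_decl (vardecl);
	// after the whole translation unit has been parsed:
	symtab->finalize_compilation_unit ();

   after which control eventually reaches symbol_table::compile and the
   stages enumerated above.  */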
159 
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h"     /* FIXME: For reg_obstack.  */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "lto-section-names.h"
206 #include "stringpool.h"
207 #include "attribs.h"
208 
209 /* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
210    secondary queue used during optimization to accommodate passes that
211    may generate new functions that need to be optimized and expanded.  */
212 vec<cgraph_node *> cgraph_new_nodes;
213 
214 static void expand_all_functions (void);
215 static void mark_functions_to_output (void);
216 static void handle_alias_pairs (void);
217 
218 /* Used for vtable lookup in thunk adjusting.  */
219 static GTY (()) tree vtable_entry_type;
220 
221 /* Return true if this symbol is a function from the C frontend specified
222    directly in RTL form (with "__RTL").  */
223 
224 bool
225 symtab_node::native_rtl_p () const
226 {
227   if (TREE_CODE (decl) != FUNCTION_DECL)
228     return false;
229   if (!DECL_STRUCT_FUNCTION (decl))
230     return false;
231   return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
232 }
233 
234 /* Determine if the symbol declaration is needed.  That is, visible to
235    something either outside this translation unit or to something magic in
236    the system configury.  */
237 bool
238 symtab_node::needed_p (void)
239 {
240   /* Double check that no one has output the function into the assembly
241      file early.  */
242   if (!native_rtl_p ())
243       gcc_checking_assert
244 	(!DECL_ASSEMBLER_NAME_SET_P (decl)
245 	 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
246 
247   if (!definition)
248     return false;
249 
250   if (DECL_EXTERNAL (decl))
251     return false;
252 
253   /* If the user told us it is used, then it must be so.  */
254   if (force_output)
255     return true;
256 
257   /* ABI forced symbols are needed when they are external.  */
258   if (forced_by_abi && TREE_PUBLIC (decl))
259     return true;
260 
261   /* Keep constructors, destructors and virtual functions.  */
262    if (TREE_CODE (decl) == FUNCTION_DECL
263        && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
264     return true;
265 
266   /* Externally visible variables must be output.  The exception is
267      COMDAT variables that must be output only when they are needed.  */
268   if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
269     return true;
270 
271   return false;
272 }
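
/* For instance, in a unit containing only

	static void keep_me (void) __attribute__ ((used));
	static void keep_me (void) { }

   the "used" attribute sets DECL_PRESERVE_P, which in turn sets
   force_output (see process_function_and_variable_attributes), so
   needed_p returns true even though nothing references keep_me.  */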
273 
274 /* Head and terminator of the queue of nodes to be processed while building
275    callgraph.  */
276 
277 static symtab_node symtab_terminator;
278 static symtab_node *queued_nodes = &symtab_terminator;
279 
280 /* Add NODE to queue starting at QUEUED_NODES.
281    The queue is linked via AUX pointers and terminated by &symtab_terminator.  */
282 
283 static void
284 enqueue_node (symtab_node *node)
285 {
286   if (node->aux)
287     return;
288   gcc_checking_assert (queued_nodes);
289   node->aux = queued_nodes;
290   queued_nodes = node;
291 }
292 
293 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
294    functions into the callgraph so that they look like ordinary reachable
295    functions inserted into the callgraph already at construction time.  */
296 
297 void
298 symbol_table::process_new_functions (void)
299 {
300   tree fndecl;
301 
302   if (!cgraph_new_nodes.exists ())
303     return;
304 
305   handle_alias_pairs ();
306   /*  Note that this queue may grow as it is being processed, as the new
307       functions may generate new ones.  */
308   for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
309     {
310       cgraph_node *node = cgraph_new_nodes[i];
311       fndecl = node->decl;
312       switch (state)
313 	{
314 	case CONSTRUCTION:
315 	  /* At construction time we just need to finalize the function and
316 	     move it into the reachable functions list.  */
317 
318 	  cgraph_node::finalize_function (fndecl, false);
319 	  call_cgraph_insertion_hooks (node);
320 	  enqueue_node (node);
321 	  break;
322 
323 	case IPA:
324 	case IPA_SSA:
325 	case IPA_SSA_AFTER_INLINING:
326 	  /* When IPA optimization has already started, do all essential
327 	     transformations that have already been performed on the whole
328 	     cgraph but not on this function.  */
329 
330 	  gimple_register_cfg_hooks ();
331 	  if (!node->analyzed)
332 	    node->analyze ();
333 	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
334 	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
335 	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
336 	    {
337 	      bool summaried_computed = ipa_fn_summaries != NULL;
338 	      g->get_passes ()->execute_early_local_passes ();
339 	      /* Early passes compute inline parameters to do inlining
340 		 and splitting.  This is redundant for functions added late.
341 		 Just throw away whatever it did.  */
342 	      if (!summaried_computed)
343 		ipa_free_fn_summary ();
344 	    }
345 	  else if (ipa_fn_summaries != NULL)
346 	    compute_fn_summary (node, true);
347 	  free_dominance_info (CDI_POST_DOMINATORS);
348 	  free_dominance_info (CDI_DOMINATORS);
349 	  pop_cfun ();
350 	  call_cgraph_insertion_hooks (node);
351 	  break;
352 
353 	case EXPANSION:
354 	  /* Functions created during expansion shall be compiled
355 	     directly.  */
356 	  node->process = 0;
357 	  call_cgraph_insertion_hooks (node);
358 	  node->expand ();
359 	  break;
360 
361 	default:
362 	  gcc_unreachable ();
363 	  break;
364 	}
365     }
366 
367   cgraph_new_nodes.release ();
368 }
369 
370 /* As a GCC extension we allow redefinition of the function.  The
371    semantics when both copies of bodies differ is not well defined.
372    We replace the old body with the new body so in unit-at-a-time mode
373    we always use the new body, while in normal mode we may end up with
374    the old body inlined into some functions and the new body expanded and
375    inlined in others.
376 
377    ??? It may make more sense to use one body for inlining and the other
378    body for expanding the function but this is difficult to do.  */
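
/* Roughly, the extension looks like this at the source level (GNU89 or
   gnu_inline semantics):

	extern inline int f (void) { return 1; }	// inline-only body
	int f (void) { return 2; }			// later redefinition

   finalize_function sees the second definition, calls reset () on the
   node and sets redefined_extern_inline, so the second body is the one
   that gets expanded.  */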
379 
380 void
381 cgraph_node::reset (void)
382 {
383   /* If process is set, then we have already begun whole-unit analysis.
384      This is *not* testing for whether we've already emitted the function.
385      That case can be sort-of legitimately seen with real function redefinition
386      errors.  I would argue that the front end should never present us with
387      such a case, but don't enforce that for now.  */
388   gcc_assert (!process);
389 
390   /* Reset our data structures so we can analyze the function again.  */
391   memset (&local, 0, sizeof (local));
392   memset (&global, 0, sizeof (global));
393   memset (&rtl, 0, sizeof (rtl));
394   analyzed = false;
395   definition = false;
396   alias = false;
397   transparent_alias = false;
398   weakref = false;
399   cpp_implicit_alias = false;
400 
401   remove_callees ();
402   remove_all_references ();
403 }
404 
405 /* Return true when there are references to the node.  INCLUDE_SELF is
406    true if a self reference counts as a reference.  */
407 
408 bool
409 symtab_node::referred_to_p (bool include_self)
410 {
411   ipa_ref *ref = NULL;
412 
413   /* See if there are any references at all.  */
414   if (iterate_referring (0, ref))
415     return true;
416   /* For functions check also calls.  */
417   cgraph_node *cn = dyn_cast <cgraph_node *> (this);
418   if (cn && cn->callers)
419     {
420       if (include_self)
421 	return true;
422       for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
423 	if (e->caller != this)
424 	  return true;
425     }
426   return false;
427 }
428 
429 /* DECL has been parsed.  Take it, queue it, compile it at the whim of the
430    logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
431    the garbage collector run at the moment.  We would need to either create
432    a new GC context, or just not compile right now.  */
433 
434 void
435 cgraph_node::finalize_function (tree decl, bool no_collect)
436 {
437   cgraph_node *node = cgraph_node::get_create (decl);
438 
439   if (node->definition)
440     {
441       /* Nested functions should only be defined once.  */
442       gcc_assert (!DECL_CONTEXT (decl)
443 		  || TREE_CODE (DECL_CONTEXT (decl)) !=	FUNCTION_DECL);
444       node->reset ();
445       node->local.redefined_extern_inline = true;
446     }
447 
448   /* Set definition first before calling notice_global_symbol so that
449      it is available to notice_global_symbol.  */
450   node->definition = true;
451   notice_global_symbol (decl);
452   node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
453   if (!flag_toplevel_reorder)
454     node->no_reorder = true;
455 
456   /* With -fkeep-inline-functions we are keeping all inline functions except
457      for extern inline ones.  */
458   if (flag_keep_inline_functions
459       && DECL_DECLARED_INLINE_P (decl)
460       && !DECL_EXTERNAL (decl)
461       && !DECL_DISREGARD_INLINE_LIMITS (decl))
462     node->force_output = 1;
463 
464   /* __RTL functions were already output as soon as they were parsed (due
465      to the large amount of global state in the backend).
466      Mark such functions as "force_output" to reflect the fact that they
467      will be in the asm file when considering the symbols they reference.
468      The attempt to output them later on will bail out immediately.  */
469   if (node->native_rtl_p ())
470     node->force_output = 1;
471 
472   /* When not optimizing, also output the static functions (see PR24561),
473      but don't do so for always_inline functions, functions declared inline
474      and nested functions.  These were optimized out in the original
475      implementation and it is unclear whether we want to change the behavior
476      here.  */
477   if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
478 	|| node->no_reorder)
479        && !node->cpp_implicit_alias
480        && !DECL_DISREGARD_INLINE_LIMITS (decl)
481        && !DECL_DECLARED_INLINE_P (decl)
482        && !(DECL_CONTEXT (decl)
483 	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
484       && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
485     node->force_output = 1;
486 
487   /* If we've not yet emitted decl, tell the debug info about it.  */
488   if (!TREE_ASM_WRITTEN (decl))
489     (*debug_hooks->deferred_inline_function) (decl);
490 
491   if (!no_collect)
492     ggc_collect ();
493 
494   if (symtab->state == CONSTRUCTION
495       && (node->needed_p () || node->referred_to_p ()))
496     enqueue_node (node);
497 }
498 
499 /* Add the function FNDECL to the call graph.
500    Unlike finalize_function, this function is intended to be used
501    by the middle end and allows insertion of new functions at an arbitrary
502    point of compilation.  The function can be in high, low or SSA form
503    GIMPLE.
504 
505    The function is assumed to be reachable and to have its address taken (so
506    no API breaking optimizations are performed on it).
507 
508    The main work done by this function is to enqueue the function for later
509    processing to avoid the need for the passes to be re-entrant.  */
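
/* A minimal sketch of the intended use from a pass, assuming FNDECL is a
   freshly built FUNCTION_DECL whose GIMPLE body has already been attached
   and lowered:

	cgraph_node::add_new_function (fndecl, true);	// lowered == true

   The new node is then picked up by symbol_table::process_new_functions
   at the next safe point.  */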
510 
511 void
512 cgraph_node::add_new_function (tree fndecl, bool lowered)
513 {
514   gcc::pass_manager *passes = g->get_passes ();
515   cgraph_node *node;
516 
517   if (dump_file)
518     {
519       struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
520       const char *function_type = ((gimple_has_body_p (fndecl))
521 				   ? (lowered
522 				      ? (gimple_in_ssa_p (fn)
523 					 ? "ssa gimple"
524 					 : "low gimple")
525 				      : "high gimple")
526 				   : "to-be-gimplified");
527       fprintf (dump_file,
528 	       "Added new %s function %s to callgraph\n",
529 	       function_type,
530 	       fndecl_name (fndecl));
531     }
532 
533   switch (symtab->state)
534     {
535       case PARSING:
536 	cgraph_node::finalize_function (fndecl, false);
537 	break;
538       case CONSTRUCTION:
539 	/* Just enqueue the function to be processed at the nearest occasion.  */
540 	node = cgraph_node::get_create (fndecl);
541 	if (lowered)
542 	  node->lowered = true;
543 	cgraph_new_nodes.safe_push (node);
544         break;
545 
546       case IPA:
547       case IPA_SSA:
548       case IPA_SSA_AFTER_INLINING:
549       case EXPANSION:
550 	/* Bring the function into finalized state and enqueue it for later
551 	   analysis and compilation.  */
552 	node = cgraph_node::get_create (fndecl);
553 	node->local.local = false;
554 	node->definition = true;
555 	node->force_output = true;
556 	if (TREE_PUBLIC (fndecl))
557 	  node->externally_visible = true;
558 	if (!lowered && symtab->state == EXPANSION)
559 	  {
560 	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
561 	    gimple_register_cfg_hooks ();
562 	    bitmap_obstack_initialize (NULL);
563 	    execute_pass_list (cfun, passes->all_lowering_passes);
564 	    passes->execute_early_local_passes ();
565 	    bitmap_obstack_release (NULL);
566 	    pop_cfun ();
567 
568 	    lowered = true;
569 	  }
570 	if (lowered)
571 	  node->lowered = true;
572 	cgraph_new_nodes.safe_push (node);
573         break;
574 
575       case FINISHED:
576 	/* At the very end of compilation we have to do all the work up
577 	   to expansion.  */
578 	node = cgraph_node::create (fndecl);
579 	if (lowered)
580 	  node->lowered = true;
581 	node->definition = true;
582 	node->analyze ();
583 	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
584 	gimple_register_cfg_hooks ();
585 	bitmap_obstack_initialize (NULL);
586 	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
587 	  g->get_passes ()->execute_early_local_passes ();
588 	bitmap_obstack_release (NULL);
589 	pop_cfun ();
590 	node->expand ();
591 	break;
592 
593       default:
594 	gcc_unreachable ();
595     }
596 
597   /* Set a personality if required and we already passed EH lowering.  */
598   if (lowered
599       && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
600 	  == eh_personality_lang))
601     DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
602 }
603 
604 /* Analyze the function scheduled to be output.  */
605 void
606 cgraph_node::analyze (void)
607 {
608   if (native_rtl_p ())
609     {
610       analyzed = true;
611       return;
612     }
613 
614   tree decl = this->decl;
615   location_t saved_loc = input_location;
616   input_location = DECL_SOURCE_LOCATION (decl);
617 
618   if (thunk.thunk_p)
619     {
620       cgraph_node *t = cgraph_node::get (thunk.alias);
621 
622       create_edge (t, NULL, t->count);
623       callees->can_throw_external = !TREE_NOTHROW (t->decl);
624       /* Target code in expand_thunk may need the thunk's target
625 	 to be analyzed, so recurse here.  */
626       if (!t->analyzed && t->definition)
627 	t->analyze ();
628       if (t->alias)
629 	{
630 	  t = t->get_alias_target ();
631 	  if (!t->analyzed && t->definition)
632 	    t->analyze ();
633 	}
634       bool ret = expand_thunk (false, false);
635       thunk.alias = NULL;
636       if (!ret)
637 	return;
638     }
639   if (alias)
640     resolve_alias (cgraph_node::get (alias_target), transparent_alias);
641   else if (dispatcher_function)
642     {
643       /* Generate the dispatcher body of multi-versioned functions.  */
644       cgraph_function_version_info *dispatcher_version_info
645 	= function_version ();
646       if (dispatcher_version_info != NULL
647           && (dispatcher_version_info->dispatcher_resolver
648 	      == NULL_TREE))
649 	{
650 	  tree resolver = NULL_TREE;
651 	  gcc_assert (targetm.generate_version_dispatcher_body);
652 	  resolver = targetm.generate_version_dispatcher_body (this);
653 	  gcc_assert (resolver != NULL_TREE);
654 	}
655     }
656   else
657     {
658       push_cfun (DECL_STRUCT_FUNCTION (decl));
659 
660       assign_assembler_name_if_needed (decl);
661 
662       /* Make sure to gimplify bodies only once.  While analyzing a
663 	 function we lower it, which will require gimplified nested
664 	 functions, so we can end up here with an already gimplified
665 	 body.  */
666       if (!gimple_has_body_p (decl))
667 	gimplify_function_tree (decl);
668 
669       /* Lower the function.  */
670       if (!lowered)
671 	{
672 	  if (nested)
673 	    lower_nested_functions (decl);
674 	  gcc_assert (!nested);
675 
676 	  gimple_register_cfg_hooks ();
677 	  bitmap_obstack_initialize (NULL);
678 	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
679 	  free_dominance_info (CDI_POST_DOMINATORS);
680 	  free_dominance_info (CDI_DOMINATORS);
681 	  compact_blocks ();
682 	  bitmap_obstack_release (NULL);
683 	  lowered = true;
684 	}
685 
686       pop_cfun ();
687     }
688   analyzed = true;
689 
690   input_location = saved_loc;
691 }
692 
693 /* The C++ frontend produces same body aliases all over the place, even before
694    PCH gets streamed out.  It relies on us linking the aliases with their
695    function in order to do the fixups, but ipa-ref is not PCH safe.
696    Consequently we first produce aliases without links, but once the C++ FE is
697    sure it won't stream PCH we build the links via this function.  */
698 
699 void
700 symbol_table::process_same_body_aliases (void)
701 {
702   symtab_node *node;
703   FOR_EACH_SYMBOL (node)
704     if (node->cpp_implicit_alias && !node->analyzed)
705       node->resolve_alias
706 	(VAR_P (node->alias_target)
707 	 ? (symtab_node *)varpool_node::get_create (node->alias_target)
708 	 : (symtab_node *)cgraph_node::get_create (node->alias_target));
709   cpp_implicit_aliases_done = true;
710 }
711 
712 /* Process attributes common to vars and functions.  */
713 
714 static void
715 process_common_attributes (symtab_node *node, tree decl)
716 {
717   tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
718 
719   if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
720     {
721       warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
722 		  "%<weakref%> attribute should be accompanied with"
723 		  " an %<alias%> attribute");
724       DECL_WEAK (decl) = 0;
725       DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
726 						 DECL_ATTRIBUTES (decl));
727     }
728 
729   if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
730     node->no_reorder = 1;
731 }
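
/* For example, given

	static int impl (void);
	static int wr (void) __attribute__ ((weakref ("impl")));	// OK
	static int wr2 (void) __attribute__ ((weakref));		// warned about

   the declaration of wr2 carries neither a target string nor an "alias"
   attribute, so process_common_attributes above drops its DECL_WEAK flag
   and the "weakref" attribute itself.  */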
732 
733 /* Look for externally_visible and used attributes and mark cgraph nodes
734    accordingly.
735 
736    We cannot mark the nodes at the point the attributes are processed (in
737    handle_*_attribute) because the copy of the declarations available at that
738    point may not be canonical.  For example, in:
739 
740     void f();
741     void f() __attribute__((used));
742 
743    the declaration we see in handle_used_attribute will be the second
744    declaration -- but the front end will subsequently merge that declaration
745    with the original declaration and discard the second declaration.
746 
747    Furthermore, we can't mark these nodes in finalize_function because:
748 
749     void f() {}
750     void f() __attribute__((externally_visible));
751 
752    is valid.
753 
754    So, we walk the nodes at the end of the translation unit, applying the
755    attributes at that point.  */
756 
757 static void
758 process_function_and_variable_attributes (cgraph_node *first,
759                                           varpool_node *first_var)
760 {
761   cgraph_node *node;
762   varpool_node *vnode;
763 
764   for (node = symtab->first_function (); node != first;
765        node = symtab->next_function (node))
766     {
767       tree decl = node->decl;
768       if (DECL_PRESERVE_P (decl))
769 	node->mark_force_output ();
770       else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
771 	{
772 	  if (! TREE_PUBLIC (node->decl))
773 	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
774 			"%<externally_visible%>"
775 			" attribute have effect only on public objects");
776 	}
777       if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
778 	  && (node->definition && !node->alias))
779 	{
780 	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
781 		      "%<weakref%> attribute ignored"
782 		      " because function is defined");
783 	  DECL_WEAK (decl) = 0;
784 	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
785 						     DECL_ATTRIBUTES (decl));
786 	}
787       else if (lookup_attribute ("alias", DECL_ATTRIBUTES (decl))
788 	  && node->definition
789 	  && !node->alias)
790 	warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
791 		    "%<alias%> attribute ignored"
792 		    " because function is defined");
793 
794       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
795 	  && !DECL_DECLARED_INLINE_P (decl)
796 	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
797 	  && !DECL_UNINLINABLE (decl))
798 	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
799 		    "always_inline function might not be inlinable");
800 
801       process_common_attributes (node, decl);
802     }
803   for (vnode = symtab->first_variable (); vnode != first_var;
804        vnode = symtab->next_variable (vnode))
805     {
806       tree decl = vnode->decl;
807       if (DECL_EXTERNAL (decl)
808 	  && DECL_INITIAL (decl))
809 	varpool_node::finalize_decl (decl);
810       if (DECL_PRESERVE_P (decl))
811 	vnode->force_output = true;
812       else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
813 	{
814 	  if (! TREE_PUBLIC (vnode->decl))
815 	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
816 			"%<externally_visible%>"
817 			" attribute have effect only on public objects");
818 	}
819       if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
820 	  && vnode->definition
821 	  && DECL_INITIAL (decl))
822 	{
823 	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
824 		      "%<weakref%> attribute ignored"
825 		      " because variable is initialized");
826 	  DECL_WEAK (decl) = 0;
827 	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
828 						      DECL_ATTRIBUTES (decl));
829 	}
830       process_common_attributes (vnode, decl);
831     }
832 }
833 
834 /* Mark DECL as finalized.  By finalizing the declaration, the frontend
835    instructs the middle end to output the variable to the asm file, if it is
836    needed or externally visible.  */
837 
838 void
839 varpool_node::finalize_decl (tree decl)
840 {
841   varpool_node *node = varpool_node::get_create (decl);
842 
843   gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
844 
845   if (node->definition)
846     return;
847   /* Set definition first before calling notice_global_symbol so that
848      it is available to notice_global_symbol.  */
849   node->definition = true;
850   notice_global_symbol (decl);
851   if (!flag_toplevel_reorder)
852     node->no_reorder = true;
853   if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
854       /* Traditionally we do not eliminate static variables when not
855 	 optimizing and when not doing toplevel reorder.  */
856       || (node->no_reorder && !DECL_COMDAT (node->decl)
857 	  && !DECL_ARTIFICIAL (node->decl)))
858     node->force_output = true;
859 
860   if (symtab->state == CONSTRUCTION
861       && (node->needed_p () || node->referred_to_p ()))
862     enqueue_node (node);
863   if (symtab->state >= IPA_SSA)
864     node->analyze ();
865   /* Some frontends produce various interface variables after compilation
866      has finished.  */
867   if (symtab->state == FINISHED
868       || (node->no_reorder
869 	  && symtab->state == EXPANSION))
870     node->assemble_decl ();
871 }
872 
873 /* EDGE is a polymorphic call.  Mark all possible targets as reachable
874    and if there is only one target, perform trivial devirtualization.
875    REACHABLE_CALL_TARGETS collects target lists we already walked to
876    avoid duplicate work.  */
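
/* For example, given the C++ source

	struct S final { virtual int f () { return 0; } };
	int call (S *p) { return p->f (); }

   the type being final means the target list for the virtual call is
   complete and has a single entry, so (when optimizing with
   -fdevirtualize) the indirect edge can be turned into a direct call
   to S::f below.  */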
877 
878 static void
879 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
880 			       cgraph_edge *edge)
881 {
882   unsigned int i;
883   void *cache_token;
884   bool final;
885   vec <cgraph_node *>targets
886     = possible_polymorphic_call_targets
887 	(edge, &final, &cache_token);
888 
889   if (!reachable_call_targets->add (cache_token))
890     {
891       if (symtab->dump_file)
892 	dump_possible_polymorphic_call_targets
893 	  (symtab->dump_file, edge);
894 
895       for (i = 0; i < targets.length (); i++)
896 	{
897 	  /* Do not bother to mark virtual methods in an anonymous namespace;
898 	     either we will find a use of the virtual table defining it, or it
899 	     is unused.  */
900 	  if (targets[i]->definition
901 	      && TREE_CODE
902 		  (TREE_TYPE (targets[i]->decl))
903 		   == METHOD_TYPE
904 	      && !type_in_anonymous_namespace_p
905 		   (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
906 	    enqueue_node (targets[i]);
907 	}
908     }
909 
910   /* Very trivial devirtualization; when the type is
911      final or anonymous (so we know all its derived types)
912      and there is only one possible virtual call target,
913      make the edge direct.  */
914   if (final)
915     {
916       if (targets.length () <= 1 && dbg_cnt (devirt))
917 	{
918 	  cgraph_node *target;
919 	  if (targets.length () == 1)
920 	    target = targets[0];
921 	  else
922 	    target = cgraph_node::create
923 			(builtin_decl_implicit (BUILT_IN_UNREACHABLE));
924 
925 	  if (symtab->dump_file)
926 	    {
927 	      fprintf (symtab->dump_file,
928 		       "Devirtualizing call: ");
929 	      print_gimple_stmt (symtab->dump_file,
930 				 edge->call_stmt, 0,
931 				 TDF_SLIM);
932 	    }
933           if (dump_enabled_p ())
934             {
935 	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
936 			       "devirtualizing call in %s to %s\n",
937 			       edge->caller->name (), target->name ());
938 	    }
939 
940 	  edge->make_direct (target);
941 	  edge->redirect_call_stmt_to_callee ();
942 
943 	  if (symtab->dump_file)
944 	    {
945 	      fprintf (symtab->dump_file,
946 		       "Devirtualized as: ");
947 	      print_gimple_stmt (symtab->dump_file,
948 				 edge->call_stmt, 0,
949 				 TDF_SLIM);
950 	    }
951 	}
952     }
953 }
954 
955 /* Issue appropriate warnings for the global declaration SNODE.  */
956 
957 static void
958 check_global_declaration (symtab_node *snode)
959 {
960   const char *decl_file;
961   tree decl = snode->decl;
962 
963   /* Warn about any function declared static but not defined.  We don't
964      warn about variables, because many programs have static variables
965      that exist only to get some text into the object file.  */
966   if (TREE_CODE (decl) == FUNCTION_DECL
967       && DECL_INITIAL (decl) == 0
968       && DECL_EXTERNAL (decl)
969       && ! DECL_ARTIFICIAL (decl)
970       && ! TREE_PUBLIC (decl))
971     {
972       if (TREE_NO_WARNING (decl))
973 	;
974       else if (snode->referred_to_p (/*include_self=*/false))
975 	pedwarn (input_location, 0, "%q+F used but never defined", decl);
976       else
977 	warning (OPT_Wunused_function, "%q+F declared %<static%> but never "
978 				       "defined", decl);
979       /* This symbol is effectively an "extern" declaration now.  */
980       TREE_PUBLIC (decl) = 1;
981     }
982 
983   /* Warn about static fns or vars defined but not used.  */
984   if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
985        || (((warn_unused_variable && ! TREE_READONLY (decl))
986 	    || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
987 		&& (warn_unused_const_variable == 2
988 		    || (main_input_filename != NULL
989 			&& (decl_file = DECL_SOURCE_FILE (decl)) != NULL
990 			&& filename_cmp (main_input_filename,
991 					 decl_file) == 0))))
992 	   && VAR_P (decl)))
993       && ! DECL_IN_SYSTEM_HEADER (decl)
994       && ! snode->referred_to_p (/*include_self=*/false)
995       /* This TREE_USED check is needed in addition to referred_to_p
996 	 above, because the `__unused__' attribute is not being
997 	 considered for referred_to_p.  */
998       && ! TREE_USED (decl)
999       /* The TREE_USED bit for file-scope decls is kept in the identifier,
1000 	 to handle multiple external decls in different scopes.  */
1001       && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
1002       && ! DECL_EXTERNAL (decl)
1003       && ! DECL_ARTIFICIAL (decl)
1004       && ! DECL_ABSTRACT_ORIGIN (decl)
1005       && ! TREE_PUBLIC (decl)
1006       /* A volatile variable might be used in some non-obvious way.  */
1007       && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
1008       /* Global register variables must be declared to reserve them.  */
1009       && ! (VAR_P (decl) && DECL_REGISTER (decl))
1010       /* Global ctors and dtors are called by the runtime.  */
1011       && (TREE_CODE (decl) != FUNCTION_DECL
1012 	  || (!DECL_STATIC_CONSTRUCTOR (decl)
1013 	      && !DECL_STATIC_DESTRUCTOR (decl)))
1014       /* Otherwise, ask the language.  */
1015       && lang_hooks.decls.warn_unused_global (decl))
1016     warning_at (DECL_SOURCE_LOCATION (decl),
1017 		(TREE_CODE (decl) == FUNCTION_DECL)
1018 		? OPT_Wunused_function
1019 		: (TREE_READONLY (decl)
1020 		   ? OPT_Wunused_const_variable_
1021 		   : OPT_Wunused_variable),
1022 		"%qD defined but not used", decl);
1023 }
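
/* For instance, given

	static void helper (void);	// never defined

   an unreferenced helper gets the -Wunused-function warning "declared
   'static' but never defined" above, while a unit that does call helper
   gets the pedwarn "used but never defined" instead.  */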
1024 
1025 /* Discover all functions and variables that are trivially needed, and analyze
1026    them as well as all functions and variables referred to by them.  */
1027 static cgraph_node *first_analyzed;
1028 static varpool_node *first_analyzed_var;
1029 
1030 /* FIRST_TIME is set to TRUE for the first time we are called for a
1031    translation unit from finalize_compilation_unit() or false
1032    otherwise.  */
1033 
1034 static void
1035 analyze_functions (bool first_time)
1036 {
1037   /* Keep track of already processed nodes when called multiple times for
1038      intermodule optimization.  */
1039   cgraph_node *first_handled = first_analyzed;
1040   varpool_node *first_handled_var = first_analyzed_var;
1041   hash_set<void *> reachable_call_targets;
1042 
1043   symtab_node *node;
1044   symtab_node *next;
1045   int i;
1046   ipa_ref *ref;
1047   bool changed = true;
1048   location_t saved_loc = input_location;
1049 
1050   bitmap_obstack_initialize (NULL);
1051   symtab->state = CONSTRUCTION;
1052   input_location = UNKNOWN_LOCATION;
1053 
1054   /* Ugly, but the fixup cannot happen at the time the same body alias is
1055      created; the C++ FE is confused about the COMDAT groups being right.  */
1056   if (symtab->cpp_implicit_aliases_done)
1057     FOR_EACH_SYMBOL (node)
1058       if (node->cpp_implicit_alias)
1059 	  node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1060   build_type_inheritance_graph ();
1061 
1062   /* Analysis adds static variables that in turn add references to new
1063      functions, so we need to iterate the process until it stabilizes.  */
1064   while (changed)
1065     {
1066       changed = false;
1067       process_function_and_variable_attributes (first_analyzed,
1068 						first_analyzed_var);
1069 
1070       /* First identify the trivially needed symbols.  */
1071       for (node = symtab->first_symbol ();
1072 	   node != first_analyzed
1073 	   && node != first_analyzed_var; node = node->next)
1074 	{
1075 	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
1076 	  node->get_comdat_group_id ();
1077 	  if (node->needed_p ())
1078 	    {
1079 	      enqueue_node (node);
1080 	      if (!changed && symtab->dump_file)
1081 		fprintf (symtab->dump_file, "Trivially needed symbols:");
1082 	      changed = true;
1083 	      if (symtab->dump_file)
1084 		fprintf (symtab->dump_file, " %s", node->asm_name ());
1085 	      if (!changed && symtab->dump_file)
1086 		fprintf (symtab->dump_file, "\n");
1087 	    }
1088 	  if (node == first_analyzed
1089 	      || node == first_analyzed_var)
1090 	    break;
1091 	}
1092       symtab->process_new_functions ();
1093       first_analyzed_var = symtab->first_variable ();
1094       first_analyzed = symtab->first_function ();
1095 
1096       if (changed && symtab->dump_file)
1097 	fprintf (symtab->dump_file, "\n");
1098 
1099       /* Lower representation, build callgraph edges and references for all
1100 	 trivially needed symbols and all symbols referred to by them.  */
1101       while (queued_nodes != &symtab_terminator)
1102 	{
1103 	  changed = true;
1104 	  node = queued_nodes;
1105 	  queued_nodes = (symtab_node *)queued_nodes->aux;
1106 	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1107 	  if (cnode && cnode->definition)
1108 	    {
1109 	      cgraph_edge *edge;
1110 	      tree decl = cnode->decl;
1111 
1112 	      /* ??? It is possible to create an extern inline function
1113 	      and later use the weak alias attribute to kill its body.
1114 	      See gcc.c-torture/compile/20011119-1.c  */
1115 	      if (!DECL_STRUCT_FUNCTION (decl)
1116 		  && !cnode->alias
1117 		  && !cnode->thunk.thunk_p
1118 		  && !cnode->dispatcher_function)
1119 		{
1120 		  cnode->reset ();
1121 		  cnode->local.redefined_extern_inline = true;
1122 		  continue;
1123 		}
1124 
1125 	      if (!cnode->analyzed)
1126 		cnode->analyze ();
1127 
1128 	      for (edge = cnode->callees; edge; edge = edge->next_callee)
1129 		if (edge->callee->definition
1130 		    && (!DECL_EXTERNAL (edge->callee->decl)
1131 			/* When not optimizing, do not try to analyze extern
1132 			   inline functions.  Doing so is pointless.  */
1133 			|| opt_for_fn (edge->callee->decl, optimize)
1134 			/* Weakrefs need to be preserved.  */
1135 			|| edge->callee->alias
1136 			/* always_inline functions are inlined even at -O0.  */
1137 		        || lookup_attribute
1138 				 ("always_inline",
1139 			          DECL_ATTRIBUTES (edge->callee->decl))
1140 			/* Multiversioned functions need the dispatcher to
1141 			   be produced locally even for extern functions.  */
1142 			|| edge->callee->function_version ()))
1143 		   enqueue_node (edge->callee);
1144 	      if (opt_for_fn (cnode->decl, optimize)
1145 		  && opt_for_fn (cnode->decl, flag_devirtualize))
1146 		{
1147 		  cgraph_edge *next;
1148 
1149 		  for (edge = cnode->indirect_calls; edge; edge = next)
1150 		    {
1151 		      next = edge->next_callee;
1152 		      if (edge->indirect_info->polymorphic)
1153 			walk_polymorphic_call_targets (&reachable_call_targets,
1154 						       edge);
1155 		    }
1156 		}
1157 
1158 	      /* If decl is a clone of an abstract function,
1159 		 mark that abstract function so that we don't release its body.
1160 		 The DECL_INITIAL() of that abstract function declaration
1161 		 will be later needed to output debug info.  */
1162 	      if (DECL_ABSTRACT_ORIGIN (decl))
1163 		{
1164 		  cgraph_node *origin_node
1165 		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1166 		  origin_node->used_as_abstract_origin = true;
1167 		}
1168 	      /* Preserve a function's function context node.  It will
1169 		 later be needed to output debug info.  */
1170 	      if (tree fn = decl_function_context (decl))
1171 		{
1172 		  cgraph_node *origin_node = cgraph_node::get_create (fn);
1173 		  enqueue_node (origin_node);
1174 		}
1175 	    }
1176 	  else
1177 	    {
1178 	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
1179 	      if (vnode && vnode->definition && !vnode->analyzed)
1180 		vnode->analyze ();
1181 	    }
1182 
1183 	  if (node->same_comdat_group)
1184 	    {
1185 	      symtab_node *next;
1186 	      for (next = node->same_comdat_group;
1187 		   next != node;
1188 		   next = next->same_comdat_group)
1189 		if (!next->comdat_local_p ())
1190 		  enqueue_node (next);
1191 	    }
1192 	  for (i = 0; node->iterate_reference (i, ref); i++)
1193 	    if (ref->referred->definition
1194 		&& (!DECL_EXTERNAL (ref->referred->decl)
1195 		    || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1196 			 && optimize)
1197 			|| (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1198 			    && opt_for_fn (ref->referred->decl, optimize))
1199 		    || node->alias
1200 		    || ref->referred->alias)))
1201 	      enqueue_node (ref->referred);
1202 	  symtab->process_new_functions ();
1203 	}
1204     }
1205   update_type_inheritance_graph ();
1206 
1207   /* Collect entry points to the unit.  */
1208   if (symtab->dump_file)
1209     {
1210       fprintf (symtab->dump_file, "\n\nInitial ");
1211       symtab->dump (symtab->dump_file);
1212     }
1213 
1214   if (first_time)
1215     {
1216       symtab_node *snode;
1217       FOR_EACH_SYMBOL (snode)
1218 	check_global_declaration (snode);
1219     }
1220 
1221   if (symtab->dump_file)
1222     fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1223 
1224   for (node = symtab->first_symbol ();
1225        node != first_handled
1226        && node != first_handled_var; node = next)
1227     {
1228       next = node->next;
1229       /* For symbols declared locally we clear TREE_READONLY when emitting
1230 	 the constructor (if one is needed).  For external declarations we
1231 	 cannot safely assume that the type is readonly because we may be
1232 	 called during its construction.  */
1233       if (TREE_CODE (node->decl) == VAR_DECL
1234 	  && TYPE_P (TREE_TYPE (node->decl))
1235 	  && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (node->decl))
1236 	  && DECL_EXTERNAL (node->decl))
1237 	TREE_READONLY (node->decl) = 0;
1238       if (!node->aux && !node->referred_to_p ())
1239 	{
1240 	  if (symtab->dump_file)
1241 	    fprintf (symtab->dump_file, " %s", node->name ());
1242 
1243 	  /* See if the debugger can use anything before the DECL
1244 	     passes away.  Perhaps it can notice a DECL that is now a
1245 	     constant and can tag the early DIE with an appropriate
1246 	     attribute.
1247 
1248 	     Otherwise, this is the last chance the debug_hooks have
1249 	     at looking at optimized away DECLs, since
1250 	     late_global_decl will subsequently be called from the
1251 	     contents of the now pruned symbol table.  */
1252 	  if (VAR_P (node->decl)
1253 	      && !decl_function_context (node->decl))
1254 	    {
1255 	      /* We are reclaiming totally unreachable code and variables
1256 	         so they effectively appear as readonly.  Show that to
1257 		 the debug machinery.  */
1258 	      TREE_READONLY (node->decl) = 1;
1259 	      node->definition = false;
1260 	      (*debug_hooks->late_global_decl) (node->decl);
1261 	    }
1262 
1263 	  node->remove ();
1264 	  continue;
1265 	}
1266       if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1267 	{
1268 	  tree decl = node->decl;
1269 
1270 	  if (cnode->definition && !gimple_has_body_p (decl)
1271 	      && !cnode->alias
1272 	      && !cnode->thunk.thunk_p)
1273 	    cnode->reset ();
1274 
1275 	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1276 		      || cnode->alias
1277 		      || gimple_has_body_p (decl)
1278 		      || cnode->native_rtl_p ());
1279 	  gcc_assert (cnode->analyzed == cnode->definition);
1280 	}
1281       node->aux = NULL;
1282     }
1283   for (;node; node = node->next)
1284     node->aux = NULL;
1285   first_analyzed = symtab->first_function ();
1286   first_analyzed_var = symtab->first_variable ();
1287   if (symtab->dump_file)
1288     {
1289       fprintf (symtab->dump_file, "\n\nReclaimed ");
1290       symtab->dump (symtab->dump_file);
1291     }
1292   bitmap_obstack_release (NULL);
1293   ggc_collect ();
1294   /* Initialize assembler name hash, in particular we want to trigger C++
1295      mangling and same body alias creation before we free DECL_ARGUMENTS
1296      used by it.  */
1297   if (!seen_error ())
1298     symtab->symtab_initialize_asm_name_hash ();
1299 
1300   input_location = saved_loc;
1301 }
1302 
1303 /* Check declaration of the type of ALIAS for compatibility with its TARGET
1304    (which may be an ifunc resolver) and issue a diagnostic when they are
1305    not compatible according to language rules (plus a C++ extension for
1306    non-static member functions).  */
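
/* For example (an illustrative sketch; the names are arbitrary),

	static int resolver (void) { return 0; }	// wrong return type
	void f (void) __attribute__ ((ifunc ("resolver")));

   is rejected below with "resolver for 'f' must return 'void (*)(void)'"
   because the resolver's return type is not a pointer type.  */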
1307 
1308 static void
1309 maybe_diag_incompatible_alias (tree alias, tree target)
1310 {
1311   tree altype = TREE_TYPE (alias);
1312   tree targtype = TREE_TYPE (target);
1313 
1314   bool ifunc = cgraph_node::get (alias)->ifunc_resolver;
1315   tree funcptr = altype;
1316 
1317   if (ifunc)
1318     {
1319       /* Handle attribute ifunc first.  */
1320       if (TREE_CODE (altype) == METHOD_TYPE)
1321 	{
1322 	  /* Set FUNCPTR to the type of the alias target.  If the type
1323 	     is a non-static member function of class C, construct a type
1324 	     of an ordinary function taking C* as the first argument,
1325 	     followed by the member function argument list, and use it
1326 	     instead to check for incompatibility.  This conversion is
1327 	     not defined by the language but an extension provided by
1328 	     G++.  */
1329 
1330 	  tree rettype = TREE_TYPE (altype);
1331 	  tree args = TYPE_ARG_TYPES (altype);
1332 	  altype = build_function_type (rettype, args);
1333 	  funcptr = altype;
1334 	}
1335 
1336       targtype = TREE_TYPE (targtype);
1337 
1338       if (POINTER_TYPE_P (targtype))
1339 	{
1340 	  targtype = TREE_TYPE (targtype);
1341 
1342 	  /* Only issue Wattribute-alias for conversions to void* with
1343 	     -Wextra.  */
1344 	  if (VOID_TYPE_P (targtype) && !extra_warnings)
1345 	    return;
1346 
1347 	  /* Proceed to handle incompatible ifunc resolvers below.  */
1348 	}
1349       else
1350 	{
1351 	  funcptr = build_pointer_type (funcptr);
1352 
1353 	  error_at (DECL_SOURCE_LOCATION (target),
1354 		    "%<ifunc%> resolver for %qD must return %qT",
1355 		 alias, funcptr);
1356 	  inform (DECL_SOURCE_LOCATION (alias),
1357 		  "resolver indirect function declared here");
1358 	  return;
1359 	}
1360     }
1361 
1362   if ((!FUNC_OR_METHOD_TYPE_P (targtype)
1363        || (prototype_p (altype)
1364 	   && prototype_p (targtype)
1365 	   && !types_compatible_p (altype, targtype))))
1366     {
1367       /* Warn for incompatibilities.  Avoid warning for functions
1368 	 without a prototype to make it possible to declare aliases
1369 	 without knowing the exact type, as libstdc++ does.  */
1370       if (ifunc)
1371 	{
1372 	  funcptr = build_pointer_type (funcptr);
1373 
1374 	  auto_diagnostic_group d;
1375 	  if (warning_at (DECL_SOURCE_LOCATION (target),
1376 			  OPT_Wattribute_alias_,
1377 			  "%<ifunc%> resolver for %qD should return %qT",
1378 			  alias, funcptr))
1379 	    inform (DECL_SOURCE_LOCATION (alias),
1380 		    "resolver indirect function declared here");
1381 	}
1382       else
1383 	{
1384 	  auto_diagnostic_group d;
1385 	  if (warning_at (DECL_SOURCE_LOCATION (alias),
1386 			    OPT_Wattribute_alias_,
1387 			    "%qD alias between functions of incompatible "
1388 			    "types %qT and %qT", alias, altype, targtype))
1389 	    inform (DECL_SOURCE_LOCATION (target),
1390 		    "aliased declaration here");
1391 	}
1392     }
1393 }
1394 
1395 /* Translate the ugly representation of aliases as alias pairs into the nice
1396    representation in the callgraph.  We don't handle all cases yet,
1397    unfortunately.  */
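
/* For example, the alias pair produced for

	int real_fn (void) { return 0; }
	int alias_fn (void) __attribute__ ((alias ("real_fn")));

   is turned into a callgraph alias via cgraph_node::create_alias below,
   whereas an alias whose target is not defined in the unit (and is not a
   weakref) is diagnosed as "aliased to undefined symbol".  */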
1398 
1399 static void
1400 handle_alias_pairs (void)
1401 {
1402   alias_pair *p;
1403   unsigned i;
1404 
1405   for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1406     {
1407       symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1408 
1409       /* Weakrefs with a target not defined in the current unit are easy to
1410 	 handle: they behave just like external variables except we need to note
1411 	 the alias flag to later output the weakref pseudo op into the asm file.  */
1412       if (!target_node
1413 	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1414 	{
1415 	  symtab_node *node = symtab_node::get (p->decl);
1416 	  if (node)
1417 	    {
1418 	      node->alias_target = p->target;
1419 	      node->weakref = true;
1420 	      node->alias = true;
1421 	      node->transparent_alias = true;
1422 	    }
1423 	  alias_pairs->unordered_remove (i);
1424 	  continue;
1425 	}
1426       else if (!target_node)
1427 	{
1428 	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1429 	  symtab_node *node = symtab_node::get (p->decl);
1430 	  if (node)
1431 	    node->alias = false;
1432 	  alias_pairs->unordered_remove (i);
1433 	  continue;
1434 	}
1435 
1436       if (DECL_EXTERNAL (target_node->decl)
1437 	  /* We use local aliases for C++ thunks to force the tailcall
1438 	     to bind locally.  This is a hack - to keep it working do
1439 	     the following (which is not strictly correct).  */
1440 	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1441 	      || ! DECL_VIRTUAL_P (target_node->decl))
1442 	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1443 	{
1444 	  error ("%q+D aliased to external symbol %qE",
1445 		 p->decl, p->target);
1446 	}
1447 
1448       if (TREE_CODE (p->decl) == FUNCTION_DECL
1449           && target_node && is_a <cgraph_node *> (target_node))
1450 	{
1451 	  maybe_diag_incompatible_alias (p->decl, target_node->decl);
1452 
1453 	  maybe_diag_alias_attributes (p->decl, target_node->decl);
1454 
1455 	  cgraph_node *src_node = cgraph_node::get (p->decl);
1456 	  if (src_node && src_node->definition)
1457 	    src_node->reset ();
1458 	  cgraph_node::create_alias (p->decl, target_node->decl);
1459 	  alias_pairs->unordered_remove (i);
1460 	}
1461       else if (VAR_P (p->decl)
1462 	       && target_node && is_a <varpool_node *> (target_node))
1463 	{
1464 	  varpool_node::create_alias (p->decl, target_node->decl);
1465 	  alias_pairs->unordered_remove (i);
1466 	}
1467       else
1468 	{
1469 	  error ("%q+D alias between function and variable is not supported",
1470 		 p->decl);
1471 	  inform (DECL_SOURCE_LOCATION (target_node->decl),
1472 		  "aliased declaration here");
1473 
1474 	  alias_pairs->unordered_remove (i);
1475 	}
1476     }
1477   vec_free (alias_pairs);
1478 }
1479 
1480 
1481 /* Figure out what functions we want to assemble.  */
1482 
1483 static void
1484 mark_functions_to_output (void)
1485 {
1486   bool check_same_comdat_groups = false;
1487   cgraph_node *node;
1488 
1489   if (flag_checking)
1490     FOR_EACH_FUNCTION (node)
1491       gcc_assert (!node->process);
1492 
1493   FOR_EACH_FUNCTION (node)
1494     {
1495       tree decl = node->decl;
1496 
1497       gcc_assert (!node->process || node->same_comdat_group);
1498       if (node->process)
1499 	continue;
1500 
1501       /* We need to output all local functions that are used and not
1502 	 always inlined, as well as those that are reachable from
1503 	 outside the current compilation unit.  */
1504       if (node->analyzed
1505 	  && !node->thunk.thunk_p
1506 	  && !node->alias
1507 	  && !node->global.inlined_to
1508 	  && !TREE_ASM_WRITTEN (decl)
1509 	  && !DECL_EXTERNAL (decl))
1510 	{
1511 	  node->process = 1;
1512 	  if (node->same_comdat_group)
1513 	    {
1514 	      cgraph_node *next;
1515 	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1516 		   next != node;
1517 		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1518 		if (!next->thunk.thunk_p && !next->alias
1519 		    && !next->comdat_local_p ())
1520 		  next->process = 1;
1521 	    }
1522 	}
1523       else if (node->same_comdat_group)
1524 	{
1525 	  if (flag_checking)
1526 	    check_same_comdat_groups = true;
1527 	}
1528       else
1529 	{
1530 	  /* We should've reclaimed all functions that are not needed.  */
1531 	  if (flag_checking
1532 	      && !node->global.inlined_to
1533 	      && gimple_has_body_p (decl)
1534 	      /* FIXME: in an ltrans unit when the offline copy is outside a
1535 		 partition but inline copies are inside a partition, we can end up
1536 		 not removing the body since we no longer have an analyzed node pointing to it.  */
1537 	      && !node->in_other_partition
1538 	      && !node->alias
1539 	      && !node->clones
1540 	      && !DECL_EXTERNAL (decl))
1541 	    {
1542 	      node->debug ();
1543 	      internal_error ("failed to reclaim unneeded function");
1544 	    }
1545 	  gcc_assert (node->global.inlined_to
1546 		      || !gimple_has_body_p (decl)
1547 		      || node->in_other_partition
1548 		      || node->clones
1549 		      || DECL_ARTIFICIAL (decl)
1550 		      || DECL_EXTERNAL (decl));
1551 
1552 	}
1553 
1554     }
1555   if (flag_checking && check_same_comdat_groups)
1556     FOR_EACH_FUNCTION (node)
1557       if (node->same_comdat_group && !node->process)
1558 	{
1559 	  tree decl = node->decl;
1560 	  if (!node->global.inlined_to
1561 	      && gimple_has_body_p (decl)
1562 	      /* FIXME: in an ltrans unit when the offline copy is outside a
1563 		 partition but inline copies are inside a partition, we can
1564 		 end up not removing the body since we no longer have an
1565 		 analyzed node pointing to it.  */
1566 	      && !node->in_other_partition
1567 	      && !node->clones
1568 	      && !DECL_EXTERNAL (decl))
1569 	    {
1570 	      node->debug ();
1571 	      internal_error ("failed to reclaim unneeded function in same "
1572 			      "comdat group");
1573 	    }
1574 	}
1575 }
1576 
1577 /* DECL is a FUNCTION_DECL.  Initialize data structures so DECL is a function
1578    in lowered GIMPLE form.  IN_SSA is true if the GIMPLE is in SSA form.
1579 
1580    Set current_function_decl and cfun to the newly constructed empty function
1581    body.  Return the basic block in the function body.  */
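/* For illustration only: the body built below is the minimal CFG

     ENTRY -> bb -> EXIT

   with one empty basic block and fallthrough edges, which callers such as
   the thunk expansion code then populate with statements.  */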
1582 
1583 basic_block
1584 init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
1585 {
1586   basic_block bb;
1587   edge e;
1588 
1589   current_function_decl = decl;
1590   allocate_struct_function (decl, false);
1591   gimple_register_cfg_hooks ();
1592   init_empty_tree_cfg ();
1593   init_tree_ssa (cfun);
1594 
1595   if (in_ssa)
1596     {
1597       init_ssa_operands (cfun);
1598       cfun->gimple_df->in_ssa_p = true;
1599       cfun->curr_properties |= PROP_ssa;
1600     }
1601 
1602   DECL_INITIAL (decl) = make_node (BLOCK);
1603   BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1604 
1605   DECL_SAVED_TREE (decl) = error_mark_node;
1606   cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1607 			    | PROP_cfg | PROP_loops);
1608 
1609   set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1610   init_loops_structure (cfun, loops_for_fn (cfun), 1);
1611   loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1612 
1613   /* Create BB for body of the function and connect it properly.  */
1614   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1615   EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1616   bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1617   bb->count = count;
1618   e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1619   e->probability = profile_probability::always ();
1620   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1621   e->probability = profile_probability::always ();
1622   add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1623 
1624   return bb;
1625 }
1626 
1627 /* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
1628    VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
1629    it is non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and zero
1630    for a result adjusting thunk.  */
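/* For illustration only (not part of GCC): in a C++ hierarchy such as

     struct A { int a; virtual int f (); };
     struct B { int b; virtual int f (); };
     struct C : A, B { virtual int f (); };

   calling C::f through a B* requires a this-adjusting thunk whose
   FIXED_OFFSET converts the pointer to the B subobject back to the start of
   the C object (the exact value is ABI and target dependent).  With a
   virtual base the delta is not a compile-time constant, so VIRTUAL_OFFSET
   names the vtable slot from which the adjustment is loaded at run time.  */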
1631 
1632 tree
1633 thunk_adjust (gimple_stmt_iterator * bsi,
1634 	      tree ptr, bool this_adjusting,
1635 	      HOST_WIDE_INT fixed_offset, tree virtual_offset,
1636 	      HOST_WIDE_INT indirect_offset)
1637 {
1638   gassign *stmt;
1639   tree ret;
1640 
1641   if (this_adjusting
1642       && fixed_offset != 0)
1643     {
1644       stmt = gimple_build_assign
1645 		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
1646 						       ptr,
1647 						       fixed_offset));
1648       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1649     }
1650 
1651   if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
1652     {
1653       tree vfunc_type = make_node (FUNCTION_TYPE);
1654       TREE_TYPE (vfunc_type) = integer_type_node;
1655       TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1656       layout_type (vfunc_type);
1657 
1658       vtable_entry_type = build_pointer_type (vfunc_type);
1659     }
1660 
1661   /* If there's a virtual offset, look up that value in the vtable and
1662      adjust the pointer again.  */
1663   if (virtual_offset)
1664     {
1665       tree vtabletmp;
1666       tree vtabletmp2;
1667       tree vtabletmp3;
1668 
1669       vtabletmp =
1670 	create_tmp_reg (build_pointer_type
1671 			  (build_pointer_type (vtable_entry_type)), "vptr");
1672 
1673       /* The vptr is always at offset zero in the object.  */
1674       stmt = gimple_build_assign (vtabletmp,
1675 				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1676 					  ptr));
1677       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1678 
1679       /* Form the vtable address.  */
1680       vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1681 				     "vtableaddr");
1682       stmt = gimple_build_assign (vtabletmp2,
1683 				  build_simple_mem_ref (vtabletmp));
1684       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1685 
1686       /* Find the entry with the vcall offset.  */
1687       stmt = gimple_build_assign (vtabletmp2,
1688 				  fold_build_pointer_plus_loc (input_location,
1689 							       vtabletmp2,
1690 							       virtual_offset));
1691       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1692 
1693       /* Get the offset itself.  */
1694       vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1695 				     "vcalloffset");
1696       stmt = gimple_build_assign (vtabletmp3,
1697 				  build_simple_mem_ref (vtabletmp2));
1698       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1699 
1700       /* Adjust the `this' pointer.  */
1701       ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1702       ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1703 				      GSI_CONTINUE_LINKING);
1704     }
1705 
1706   /* Likewise for an offset that is stored in the object that contains the
1707      vtable.  */
1708   if (indirect_offset != 0)
1709     {
1710       tree offset_ptr, offset_tree;
1711 
1712       /* Get the address of the offset.  */
1713       offset_ptr
1714         = create_tmp_reg (build_pointer_type
1715 			  (build_pointer_type (vtable_entry_type)),
1716 			  "offset_ptr");
1717       stmt = gimple_build_assign (offset_ptr,
1718 				  build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
1719 					  ptr));
1720       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1721 
1722       stmt = gimple_build_assign
1723 	     (offset_ptr,
1724 	      fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
1725 					       indirect_offset));
1726       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1727 
1728       /* Get the offset itself.  */
1729       offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
1730 				    "offset");
1731       stmt = gimple_build_assign (offset_tree,
1732 				  build_simple_mem_ref (offset_ptr));
1733       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1734 
1735       /* Adjust the `this' pointer.  */
1736       ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
1737       ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1738 				      GSI_CONTINUE_LINKING);
1739     }
1740 
1741   if (!this_adjusting
1742       && fixed_offset != 0)
1743     /* Adjust the pointer by the constant.  */
1744     {
1745       tree ptrtmp;
1746 
1747       if (VAR_P (ptr))
1748         ptrtmp = ptr;
1749       else
1750         {
1751           ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1752           stmt = gimple_build_assign (ptrtmp, ptr);
1753 	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1754 	}
1755       ptr = fold_build_pointer_plus_hwi_loc (input_location,
1756 					     ptrtmp, fixed_offset);
1757     }
1758 
1759   /* Emit the statement and gimplify the adjustment expression.  */
1760   ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1761   stmt = gimple_build_assign (ret, ptr);
1762   gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1763 
1764   return ret;
1765 }
1766 
1767 /* Expand the thunk NODE to GIMPLE if possible.
1768    When FORCE_GIMPLE_THUNK is true, a GIMPLE thunk is created and
1769    no assembler is produced.
1770    When OUTPUT_ASM_THUNKS is true, also produce assembler for
1771    thunks that are not lowered.  */
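/* A rough sketch (for illustration; temporaries and corner cases elided) of
   the GIMPLE a simple this-adjusting thunk expands to when the target's
   MI-thunk hook cannot be used:

     adjusted_this = this + FIXED_OFFSET;
     retval = <target function> (adjusted_this, arg1, ...);
     return retval;

   Return-adjusting (covariant return) thunks instead adjust the returned
   pointer after the call, guarded by the NULL check emitted below.  */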
1772 
1773 bool
1774 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1775 {
1776   bool this_adjusting = thunk.this_adjusting;
1777   HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1778   HOST_WIDE_INT virtual_value = thunk.virtual_value;
1779   HOST_WIDE_INT indirect_offset = thunk.indirect_offset;
1780   tree virtual_offset = NULL;
1781   tree alias = callees->callee->decl;
1782   tree thunk_fndecl = decl;
1783   tree a;
1784 
1785   if (!force_gimple_thunk
1786       && this_adjusting
1787       && indirect_offset == 0
1788       && !DECL_EXTERNAL (alias)
1789       && !DECL_STATIC_CHAIN (alias)
1790       && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1791 					      virtual_value, alias))
1792     {
1793       const char *fnname;
1794       tree fn_block;
1795       tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1796 
1797       if (!output_asm_thunks)
1798 	{
1799 	  analyzed = true;
1800 	  return false;
1801 	}
1802 
1803       if (in_lto_p)
1804 	get_untransformed_body ();
1805       a = DECL_ARGUMENTS (thunk_fndecl);
1806 
1807       current_function_decl = thunk_fndecl;
1808 
1809       /* Ensure thunks are emitted in their correct sections.  */
1810       resolve_unique_section (thunk_fndecl, 0,
1811 			      flag_function_sections);
1812 
1813       DECL_RESULT (thunk_fndecl)
1814 	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1815 		      RESULT_DECL, 0, restype);
1816       DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1817       fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1818 
1819       /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1820 	 create one.  */
1821       fn_block = make_node (BLOCK);
1822       BLOCK_VARS (fn_block) = a;
1823       DECL_INITIAL (thunk_fndecl) = fn_block;
1824       BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1825       allocate_struct_function (thunk_fndecl, false);
1826       init_function_start (thunk_fndecl);
1827       cfun->is_thunk = 1;
1828       insn_locations_init ();
1829       set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1830       prologue_location = curr_insn_location ();
1831       assemble_start_function (thunk_fndecl, fnname);
1832 
1833       targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1834 				       fixed_offset, virtual_value, alias);
1835 
1836       assemble_end_function (thunk_fndecl, fnname);
1837       insn_locations_finalize ();
1838       init_insn_lengths ();
1839       free_after_compilation (cfun);
1840       TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1841       thunk.thunk_p = false;
1842       analyzed = false;
1843     }
1844   else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1845     {
1846       error ("generic thunk code fails for method %qD which uses %<...%>",
1847 	     thunk_fndecl);
1848       TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1849       analyzed = true;
1850       return false;
1851     }
1852   else
1853     {
1854       tree restype;
1855       basic_block bb, then_bb, else_bb, return_bb;
1856       gimple_stmt_iterator bsi;
1857       int nargs = 0;
1858       tree arg;
1859       int i;
1860       tree resdecl;
1861       tree restmp = NULL;
1862 
1863       gcall *call;
1864       greturn *ret;
1865       bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1866 
1867       /* We may be called from a context (see create_wrapper) that releases the
1868 	 body except for DECL_ARGUMENTS; in that case force_gimple_thunk is true.  */
1869       if (in_lto_p && !force_gimple_thunk)
1870 	get_untransformed_body ();
1871 
1872       /* We need to force DECL_IGNORED_P when the thunk is created
1873 	 after early debug was run.  */
1874       if (force_gimple_thunk)
1875 	DECL_IGNORED_P (thunk_fndecl) = 1;
1876 
1877       a = DECL_ARGUMENTS (thunk_fndecl);
1878 
1879       current_function_decl = thunk_fndecl;
1880 
1881       /* Ensure thunks are emitted in their correct sections.  */
1882       resolve_unique_section (thunk_fndecl, 0,
1883 			      flag_function_sections);
1884 
1885       bitmap_obstack_initialize (NULL);
1886 
1887       if (thunk.virtual_offset_p)
1888         virtual_offset = size_int (virtual_value);
1889 
1890       /* Build the return declaration for the function.  */
1891       restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1892       if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1893 	{
1894 	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1895 	  DECL_ARTIFICIAL (resdecl) = 1;
1896 	  DECL_IGNORED_P (resdecl) = 1;
1897 	  DECL_CONTEXT (resdecl) = thunk_fndecl;
1898 	  DECL_RESULT (thunk_fndecl) = resdecl;
1899 	}
1900       else
1901 	resdecl = DECL_RESULT (thunk_fndecl);
1902 
1903       profile_count cfg_count = count;
1904       if (!cfg_count.initialized_p ())
1905 	cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
1906 
1907       bb = then_bb = else_bb = return_bb
1908 	= init_lowered_empty_function (thunk_fndecl, true, cfg_count);
1909 
1910       bsi = gsi_start_bb (bb);
1911 
1912       /* Build call to the function being thunked.  */
1913       if (!VOID_TYPE_P (restype)
1914 	  && (!alias_is_noreturn
1915 	      || TREE_ADDRESSABLE (restype)
1916 	      || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
1917 	{
1918 	  if (DECL_BY_REFERENCE (resdecl))
1919 	    {
1920 	      restmp = gimple_fold_indirect_ref (resdecl);
1921 	      if (!restmp)
1922 		restmp = build2 (MEM_REF,
1923 				 TREE_TYPE (TREE_TYPE (resdecl)),
1924 				 resdecl,
1925 				 build_int_cst (TREE_TYPE (resdecl), 0));
1926 	    }
1927 	  else if (!is_gimple_reg_type (restype))
1928 	    {
1929 	      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1930 		{
1931 		  restmp = resdecl;
1932 
1933 		  if (VAR_P (restmp))
1934 		    {
1935 		      add_local_decl (cfun, restmp);
1936 		      BLOCK_VARS (DECL_INITIAL (current_function_decl))
1937 			= restmp;
1938 		    }
1939 		}
1940 	      else
1941 		restmp = create_tmp_var (restype, "retval");
1942 	    }
1943 	  else
1944 	    restmp = create_tmp_reg (restype, "retval");
1945 	}
1946 
1947       for (arg = a; arg; arg = DECL_CHAIN (arg))
1948         nargs++;
1949       auto_vec<tree> vargs (nargs);
1950       i = 0;
1951       arg = a;
1952       if (this_adjusting)
1953 	{
1954 	  vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1955 					  virtual_offset, indirect_offset));
1956 	  arg = DECL_CHAIN (a);
1957 	  i = 1;
1958 	}
1959 
1960       if (nargs)
1961 	for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1962 	  {
1963 	    tree tmp = arg;
1964 	    if (VECTOR_TYPE_P (TREE_TYPE (arg))
1965 		|| TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
1966 	      DECL_GIMPLE_REG_P (arg) = 1;
1967 
1968 	    if (!is_gimple_val (arg))
1969 	      {
1970 		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1971 				      (TREE_TYPE (arg)), "arg");
1972 		gimple *stmt = gimple_build_assign (tmp, arg);
1973 		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1974 	      }
1975 	    vargs.quick_push (tmp);
1976 	  }
1977       call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1978       callees->call_stmt = call;
1979       gimple_call_set_from_thunk (call, true);
1980       if (DECL_STATIC_CHAIN (alias))
1981 	{
1982 	  tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
1983 	  tree type = TREE_TYPE (p);
1984 	  tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1985 				  PARM_DECL, create_tmp_var_name ("CHAIN"),
1986 				  type);
1987 	  DECL_ARTIFICIAL (decl) = 1;
1988 	  DECL_IGNORED_P (decl) = 1;
1989 	  TREE_USED (decl) = 1;
1990 	  DECL_CONTEXT (decl) = thunk_fndecl;
1991 	  DECL_ARG_TYPE (decl) = type;
1992 	  TREE_READONLY (decl) = 1;
1993 
1994 	  struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
1995 	  sf->static_chain_decl = decl;
1996 
1997 	  gimple_call_set_chain (call, decl);
1998 	}
1999 
2000       /* Return slot optimization is always possible and in fact required to
2001          return values with DECL_BY_REFERENCE.  */
2002       if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
2003 	  && (!is_gimple_reg_type (TREE_TYPE (resdecl))
2004 	      || DECL_BY_REFERENCE (resdecl)))
2005         gimple_call_set_return_slot_opt (call, true);
2006 
2007       if (restmp)
2008 	{
2009           gimple_call_set_lhs (call, restmp);
2010 	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
2011 						 TREE_TYPE (TREE_TYPE (alias))));
2012 	}
2013       gsi_insert_after (&bsi, call, GSI_NEW_STMT);
2014       if (!alias_is_noreturn)
2015 	{
2016 	  if (restmp && !this_adjusting
2017 	      && (fixed_offset || virtual_offset))
2018 	    {
2019 	      tree true_label = NULL_TREE;
2020 
2021 	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
2022 		{
2023 		  gimple *stmt;
2024 		  edge e;
2025 		  /* If the return type is a pointer, we need to
2026 		     protect against NULL.  We know there will be an
2027 		     adjustment, because that's why we're emitting a
2028 		     thunk.  */
2029 		  then_bb = create_basic_block (NULL, bb);
2030 		  then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
2031 		  return_bb = create_basic_block (NULL, then_bb);
2032 		  return_bb->count = cfg_count;
2033 		  else_bb = create_basic_block (NULL, else_bb);
2034 		  else_bb->count = cfg_count.apply_scale (1, 16);
2035 		  add_bb_to_loop (then_bb, bb->loop_father);
2036 		  add_bb_to_loop (return_bb, bb->loop_father);
2037 		  add_bb_to_loop (else_bb, bb->loop_father);
2038 		  remove_edge (single_succ_edge (bb));
2039 		  true_label = gimple_block_label (then_bb);
2040 		  stmt = gimple_build_cond (NE_EXPR, restmp,
2041 					    build_zero_cst (TREE_TYPE (restmp)),
2042 					    NULL_TREE, NULL_TREE);
2043 		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2044 		  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
2045 		  e->probability = profile_probability::guessed_always ()
2046 					.apply_scale (1, 16);
2047 		  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
2048 		  e->probability = profile_probability::guessed_always ()
2049 					.apply_scale (1, 16);
2050 		  make_single_succ_edge (return_bb,
2051 					 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
2052 		  make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
2053 		  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
2054 		  e->probability = profile_probability::always ();
2055 		  bsi = gsi_last_bb (then_bb);
2056 		}
2057 
2058 	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
2059 				     fixed_offset, virtual_offset,
2060 				     indirect_offset);
2061 	      if (true_label)
2062 		{
2063 		  gimple *stmt;
2064 		  bsi = gsi_last_bb (else_bb);
2065 		  stmt = gimple_build_assign (restmp,
2066 					      build_zero_cst (TREE_TYPE (restmp)));
2067 		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2068 		  bsi = gsi_last_bb (return_bb);
2069 		}
2070 	    }
2071 	  else
2072 	    gimple_call_set_tail (call, true);
2073 
2074 	  /* Build return value.  */
2075 	  if (!DECL_BY_REFERENCE (resdecl))
2076 	    ret = gimple_build_return (restmp);
2077 	  else
2078 	    ret = gimple_build_return (resdecl);
2079 
2080 	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2081 	}
2082       else
2083 	{
2084 	  gimple_call_set_tail (call, true);
2085 	  remove_edge (single_succ_edge (bb));
2086 	}
2087 
2088       cfun->gimple_df->in_ssa_p = true;
2089       update_max_bb_count ();
2090       profile_status_for_fn (cfun)
2091         = cfg_count.initialized_p () && cfg_count.ipa_p ()
2092 	  ? PROFILE_READ : PROFILE_GUESSED;
2093       /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
2094       TREE_ASM_WRITTEN (thunk_fndecl) = false;
2095       delete_unreachable_blocks ();
2096       update_ssa (TODO_update_ssa);
2097       checking_verify_flow_info ();
2098       free_dominance_info (CDI_DOMINATORS);
2099 
2100       /* Since we want to emit the thunk, we explicitly mark its name as
2101 	 referenced.  */
2102       thunk.thunk_p = false;
2103       lowered = true;
2104       bitmap_obstack_release (NULL);
2105     }
2106   current_function_decl = NULL;
2107   set_cfun (NULL);
2108   return true;
2109 }
2110 
2111 /* Assemble thunks and aliases associated with the node.  */
2112 
2113 void
2114 cgraph_node::assemble_thunks_and_aliases (void)
2115 {
2116   cgraph_edge *e;
2117   ipa_ref *ref;
2118 
2119   for (e = callers; e;)
2120     if (e->caller->thunk.thunk_p
2121 	&& !e->caller->global.inlined_to)
2122       {
2123 	cgraph_node *thunk = e->caller;
2124 
2125 	e = e->next_caller;
2126 	thunk->expand_thunk (true, false);
2127 	thunk->assemble_thunks_and_aliases ();
2128       }
2129     else
2130       e = e->next_caller;
2131 
2132   FOR_EACH_ALIAS (this, ref)
2133     {
2134       cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2135       if (!alias->transparent_alias)
2136 	{
2137 	  bool saved_written = TREE_ASM_WRITTEN (decl);
2138 
2139 	  /* Force assemble_alias to really output the alias this time instead
2140 	     of buffering it in the alias pairs list.  */
2141 	  TREE_ASM_WRITTEN (decl) = 1;
2142 	  do_assemble_alias (alias->decl,
2143 			     DECL_ASSEMBLER_NAME (decl));
2144 	  alias->assemble_thunks_and_aliases ();
2145 	  TREE_ASM_WRITTEN (decl) = saved_written;
2146 	}
2147     }
2148 }
2149 
2150 /* Expand function specified by node.  */
2151 
2152 void
2153 cgraph_node::expand (void)
2154 {
2155   location_t saved_loc;
2156 
2157   /* We ought to not compile any inline clones.  */
2158   gcc_assert (!global.inlined_to);
2159 
2160   /* __RTL functions are compiled as soon as they are parsed, so don't
2161      do it again.  */
2162   if (native_rtl_p ())
2163     return;
2164 
2165   announce_function (decl);
2166   process = 0;
2167   gcc_assert (lowered);
2168   get_untransformed_body ();
2169 
2170   /* Generate RTL for the body of DECL.  */
2171 
2172   timevar_push (TV_REST_OF_COMPILATION);
2173 
2174   gcc_assert (symtab->global_info_ready);
2175 
2176   /* Initialize the default bitmap obstack.  */
2177   bitmap_obstack_initialize (NULL);
2178 
2179   /* Initialize the RTL code for the function.  */
2180   saved_loc = input_location;
2181   input_location = DECL_SOURCE_LOCATION (decl);
2182 
2183   gcc_assert (DECL_STRUCT_FUNCTION (decl));
2184   push_cfun (DECL_STRUCT_FUNCTION (decl));
2185   init_function_start (decl);
2186 
2187   gimple_register_cfg_hooks ();
2188 
2189   bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
2190 
2191   execute_all_ipa_transforms (false);
2192 
2193   /* Perform all tree transforms and optimizations.  */
2194 
2195   /* Signal the start of passes.  */
2196   invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2197 
2198   execute_pass_list (cfun, g->get_passes ()->all_passes);
2199 
2200   /* Signal the end of passes.  */
2201   invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2202 
2203   bitmap_obstack_release (&reg_obstack);
2204 
2205   /* Release the default bitmap obstack.  */
2206   bitmap_obstack_release (NULL);
2207 
2208   /* If requested, warn about function definitions where the function will
2209      return a value (usually of some struct or union type) which itself will
2210      take up a lot of stack space.  */
2211   if (!DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2212     {
2213       tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2214 
2215       if (ret_type && TYPE_SIZE_UNIT (ret_type)
2216 	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2217 	  && compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2218 			       warn_larger_than_size) > 0)
2219 	{
2220 	  unsigned int size_as_int
2221 	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2222 
2223 	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2224 	    warning (OPT_Wlarger_than_,
2225 		     "size of return value of %q+D is %u bytes",
2226                      decl, size_as_int);
2227 	  else
2228 	    warning (OPT_Wlarger_than_,
2229 		     "size of return value of %q+D is larger than %wu bytes",
2230 	             decl, warn_larger_than_size);
2231 	}
2232     }
2233 
2234   gimple_set_body (decl, NULL);
2235   if (DECL_STRUCT_FUNCTION (decl) == 0
2236       && !cgraph_node::get (decl)->origin)
2237     {
2238       /* Stop pointing to the local nodes about to be freed.
2239 	 But DECL_INITIAL must remain nonzero so we know this
2240 	 was an actual function definition.
2241 	 For a nested function, this is done in c_pop_function_context.
2242 	 If rest_of_compilation set this to 0, leave it 0.  */
2243       if (DECL_INITIAL (decl) != 0)
2244 	DECL_INITIAL (decl) = error_mark_node;
2245     }
2246 
2247   input_location = saved_loc;
2248 
2249   ggc_collect ();
2250   timevar_pop (TV_REST_OF_COMPILATION);
2251 
2252   /* Make sure that BE didn't give up on compiling.  */
2253   gcc_assert (TREE_ASM_WRITTEN (decl));
2254   if (cfun)
2255     pop_cfun ();
2256 
2257   /* It would make a lot more sense to output thunks before the function body
2258      to get more forward and fewer backward jumps.  This however would require
2259      solving a problem with comdats.  See PR48668.  Also aliases must come after
2260      the function itself to make one-pass assemblers, like the one on AIX, happy.
2261      See PR 50689.  FIXME: Perhaps thunks should be moved before the function
2262      IFF they are not in comdat groups.  */
2263   assemble_thunks_and_aliases ();
2264   release_body ();
2265   /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
2266      points to the dead function body.  */
2267   remove_callees ();
2268   remove_all_references ();
2269 }
2270 
2271 /* Node comparator responsible for ordering functions by the time at which
2272    they were first executed (their time profile).  */
2273 
2274 static int
2275 node_cmp (const void *pa, const void *pb)
2276 {
2277   const cgraph_node *a = *(const cgraph_node * const *) pa;
2278   const cgraph_node *b = *(const cgraph_node * const *) pb;
2279 
2280   /* Functions with a time profile must come before those without one.  */
2281   if (!a->tp_first_run || !b->tp_first_run)
2282     return a->tp_first_run - b->tp_first_run;
2283 
2284   return a->tp_first_run != b->tp_first_run
2285 	 ? b->tp_first_run - a->tp_first_run
2286 	 : b->order - a->order;
2287 }
2288 
2289 /* Expand all functions that must be output.
2290 
2291    Attempt to topologically sort the nodes so that a function is output when
2292    all the functions it calls are already assembled, allowing data to be
2293    propagated across the callgraph.  Use a stack to get a smaller distance
2294    between a function and its callees (later we may choose to use a more
2295    sophisticated algorithm for function reordering; we will likely want
2296    to use subsections to make the output functions appear in top-down
2297    order).  */
2298 
2299 static void
2300 expand_all_functions (void)
2301 {
2302   cgraph_node *node;
2303   cgraph_node **order = XCNEWVEC (cgraph_node *,
2304 					 symtab->cgraph_count);
2305   unsigned int expanded_func_count = 0, profiled_func_count = 0;
2306   int order_pos, new_order_pos = 0;
2307   int i;
2308 
2309   order_pos = ipa_reverse_postorder (order);
2310   gcc_assert (order_pos == symtab->cgraph_count);
2311 
2312   /* The garbage collector may remove inline clones that we eliminate during
2313      optimization, so we must be sure not to reference them.  */
2314   for (i = 0; i < order_pos; i++)
2315     if (order[i]->process)
2316       order[new_order_pos++] = order[i];
2317 
2318   if (flag_profile_reorder_functions)
2319     qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2320 
2321   for (i = new_order_pos - 1; i >= 0; i--)
2322     {
2323       node = order[i];
2324 
2325       if (node->process)
2326 	{
2327 	  expanded_func_count++;
2328 	  if (node->tp_first_run)
2329 	    profiled_func_count++;
2330 
2331 	  if (symtab->dump_file)
2332 	    fprintf (symtab->dump_file,
2333 		     "Time profile order in expand_all_functions:%s:%d\n",
2334 		     node->asm_name (), node->tp_first_run);
2335 	  node->process = 0;
2336 	  node->expand ();
2337 	}
2338     }
2339 
2340     if (dump_file)
2341       fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2342                main_input_filename, profiled_func_count, expanded_func_count);
2343 
2344   if (symtab->dump_file && flag_profile_reorder_functions)
2345     fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2346              profiled_func_count, expanded_func_count);
2347 
2348   symtab->process_new_functions ();
2349   free_gimplify_stack ();
2350 
2351   free (order);
2352 }
2353 
2354 /* This is used to sort the node types by the cgraph order number.  */
2355 
2356 enum cgraph_order_sort_kind
2357 {
2358   ORDER_UNDEFINED = 0,
2359   ORDER_FUNCTION,
2360   ORDER_VAR,
2361   ORDER_VAR_UNDEF,
2362   ORDER_ASM
2363 };
2364 
2365 struct cgraph_order_sort
2366 {
2367   enum cgraph_order_sort_kind kind;
2368   union
2369   {
2370     cgraph_node *f;
2371     varpool_node *v;
2372     asm_node *a;
2373   } u;
2374 };
2375 
2376 /* Output all functions, variables, and asm statements in the order
2377    according to their order fields, which is the order in which they
2378    appeared in the file.  This implements -fno-toplevel-reorder.  In
2379    this mode we may output functions and variables which don't really
2380    need to be output.  */
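/* For illustration only (not part of GCC): with -fno-toplevel-reorder a
   translation unit such as

     int counter = 1;
     asm ("# marker");
     int bump (void) { return ++counter; }

   is emitted as the variable, then the toplevel asm, then the function,
   following their order fields rather than the usual output order.  */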
2381 
2382 static void
2383 output_in_order (void)
2384 {
2385   int max;
2386   cgraph_order_sort *nodes;
2387   int i;
2388   cgraph_node *pf;
2389   varpool_node *pv;
2390   asm_node *pa;
2391   max = symtab->order;
2392   nodes = XCNEWVEC (cgraph_order_sort, max);
2393 
2394   FOR_EACH_DEFINED_FUNCTION (pf)
2395     {
2396       if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2397 	{
2398 	  if (!pf->no_reorder)
2399 	    continue;
2400 	  i = pf->order;
2401 	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2402 	  nodes[i].kind = ORDER_FUNCTION;
2403 	  nodes[i].u.f = pf;
2404 	}
2405     }
2406 
2407   /* There is a similar loop in symbol_table::output_variables.
2408      Please keep them in sync.  */
2409   FOR_EACH_VARIABLE (pv)
2410     {
2411       if (!pv->no_reorder)
2412 	continue;
2413       if (DECL_HARD_REGISTER (pv->decl)
2414 	  || DECL_HAS_VALUE_EXPR_P (pv->decl))
2415 	continue;
2416       i = pv->order;
2417       gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2418       nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
2419       nodes[i].u.v = pv;
2420     }
2421 
2422   for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2423     {
2424       i = pa->order;
2425       gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2426       nodes[i].kind = ORDER_ASM;
2427       nodes[i].u.a = pa;
2428     }
2429 
2430   /* In toplevel reorder mode we output all statics; mark them as needed.  */
2431 
2432   for (i = 0; i < max; ++i)
2433     if (nodes[i].kind == ORDER_VAR)
2434       nodes[i].u.v->finalize_named_section_flags ();
2435 
2436   for (i = 0; i < max; ++i)
2437     {
2438       switch (nodes[i].kind)
2439 	{
2440 	case ORDER_FUNCTION:
2441 	  nodes[i].u.f->process = 0;
2442 	  nodes[i].u.f->expand ();
2443 	  break;
2444 
2445 	case ORDER_VAR:
2446 	  nodes[i].u.v->assemble_decl ();
2447 	  break;
2448 
2449 	case ORDER_VAR_UNDEF:
2450 	  assemble_undefined_decl (nodes[i].u.v->decl);
2451 	  break;
2452 
2453 	case ORDER_ASM:
2454 	  assemble_asm (nodes[i].u.a->asm_str);
2455 	  break;
2456 
2457 	case ORDER_UNDEFINED:
2458 	  break;
2459 
2460 	default:
2461 	  gcc_unreachable ();
2462 	}
2463     }
2464 
2465   symtab->clear_asm_symbols ();
2466 
2467   free (nodes);
2468 }
2469 
2470 static void
2471 ipa_passes (void)
2472 {
2473   gcc::pass_manager *passes = g->get_passes ();
2474 
2475   set_cfun (NULL);
2476   current_function_decl = NULL;
2477   gimple_register_cfg_hooks ();
2478   bitmap_obstack_initialize (NULL);
2479 
2480   invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2481 
2482   if (!in_lto_p)
2483     {
2484       execute_ipa_pass_list (passes->all_small_ipa_passes);
2485       if (seen_error ())
2486 	return;
2487     }
2488 
2489   /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2490      devirtualization and other changes for which removal needs to iterate.  */
2491   symtab->remove_unreachable_nodes (symtab->dump_file);
2492 
2493   /* If pass_all_early_optimizations was not scheduled, the state of
2494      the cgraph will not be properly updated.  Update it now.  */
2495   if (symtab->state < IPA_SSA)
2496     symtab->state = IPA_SSA;
2497 
2498   if (!in_lto_p)
2499     {
2500       /* Generate coverage variables and constructors.  */
2501       coverage_finish ();
2502 
2503       /* Process new functions added.  */
2504       set_cfun (NULL);
2505       current_function_decl = NULL;
2506       symtab->process_new_functions ();
2507 
2508       execute_ipa_summary_passes
2509 	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2510     }
2511 
2512   /* Some targets need to handle LTO assembler output specially.  */
2513   if (flag_generate_lto || flag_generate_offload)
2514     targetm.asm_out.lto_start ();
2515 
2516   if (!in_lto_p
2517       || flag_incremental_link == INCREMENTAL_LINK_LTO)
2518     {
2519       if (!quiet_flag)
2520 	fprintf (stderr, "Streaming LTO\n");
2521       if (g->have_offload)
2522 	{
2523 	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2524 	  lto_stream_offload_p = true;
2525 	  ipa_write_summaries ();
2526 	  lto_stream_offload_p = false;
2527 	}
2528       if (flag_lto)
2529 	{
2530 	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
2531 	  lto_stream_offload_p = false;
2532 	  ipa_write_summaries ();
2533 	}
2534     }
2535 
2536   if (flag_generate_lto || flag_generate_offload)
2537     targetm.asm_out.lto_end ();
2538 
2539   if (!flag_ltrans
2540       && ((in_lto_p && flag_incremental_link != INCREMENTAL_LINK_LTO)
2541 	  || !flag_lto || flag_fat_lto_objects))
2542     execute_ipa_pass_list (passes->all_regular_ipa_passes);
2543   invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2544 
2545   bitmap_obstack_release (NULL);
2546 }
2547 
2548 
2549 /* Return the name of the symbol that the alias DECL is an alias of.  */
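/* For illustration only (not part of GCC): for a declaration such as

     static int local_f (void) __attribute__ ((weakref, alias ("real_f")));

   the "alias" attribute argument is the STRING_CST "real_f" and the function
   below returns the corresponding identifier node.  */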
2550 
2551 static tree
2552 get_alias_symbol (tree decl)
2553 {
2554   tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2555   return get_identifier (TREE_STRING_POINTER
2556 			  (TREE_VALUE (TREE_VALUE (alias))));
2557 }
2558 
2559 
2560 /* Weakrefs may be associated with external decls and thus not output
2561    at expansion time.  Emit all necessary aliases.  */
2562 
2563 void
2564 symbol_table::output_weakrefs (void)
2565 {
2566   symtab_node *node;
2567   FOR_EACH_SYMBOL (node)
2568     if (node->alias
2569         && !TREE_ASM_WRITTEN (node->decl)
2570 	&& node->weakref)
2571       {
2572 	tree target;
2573 
2574 	/* Weakrefs are special in that they do not require a target definition
2575 	   in the current compilation unit.  It is thus a bit hard to work out
2576 	   what we want to alias.
2577 	   When the alias target is defined, we need to fetch it from the symtab
2578 	   reference; otherwise it is pointed to by alias_target.  */
2579 	if (node->alias_target)
2580 	  target = (DECL_P (node->alias_target)
2581 		    ? DECL_ASSEMBLER_NAME (node->alias_target)
2582 		    : node->alias_target);
2583 	else if (node->analyzed)
2584 	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2585 	else
2586 	  {
2587 	    gcc_unreachable ();
2588 	    target = get_alias_symbol (node->decl);
2589 	  }
2590         do_assemble_alias (node->decl, target);
2591       }
2592 }
2593 
2594 /* Perform simple optimizations based on callgraph.  */
2595 
2596 void
2597 symbol_table::compile (void)
2598 {
2599   if (seen_error ())
2600     return;
2601 
2602   symtab_node::checking_verify_symtab_nodes ();
2603 
2604   timevar_push (TV_CGRAPHOPT);
2605   if (pre_ipa_mem_report)
2606     {
2607       fprintf (stderr, "Memory consumption before IPA\n");
2608       dump_memory_report (false);
2609     }
2610   if (!quiet_flag)
2611     fprintf (stderr, "Performing interprocedural optimizations\n");
2612   state = IPA;
2613 
2614   /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
2615   if (flag_generate_lto || flag_generate_offload)
2616     lto_streamer_hooks_init ();
2617 
2618   /* Don't run the IPA passes if there were any error or sorry messages.  */
2619   if (!seen_error ())
2620     ipa_passes ();
2621 
2622   /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
2623   if (seen_error ()
2624       || ((!in_lto_p || flag_incremental_link == INCREMENTAL_LINK_LTO)
2625 	  && flag_lto && !flag_fat_lto_objects))
2626     {
2627       timevar_pop (TV_CGRAPHOPT);
2628       return;
2629     }
2630 
2631   global_info_ready = true;
2632   if (dump_file)
2633     {
2634       fprintf (dump_file, "Optimized ");
2635       symtab->dump (dump_file);
2636     }
2637   if (post_ipa_mem_report)
2638     {
2639       fprintf (stderr, "Memory consumption after IPA\n");
2640       dump_memory_report (false);
2641     }
2642   timevar_pop (TV_CGRAPHOPT);
2643 
2644   /* Output everything.  */
2645   switch_to_section (text_section);
2646   (*debug_hooks->assembly_start) ();
2647   if (!quiet_flag)
2648     fprintf (stderr, "Assembling functions:\n");
2649   symtab_node::checking_verify_symtab_nodes ();
2650 
2651   bitmap_obstack_initialize (NULL);
2652   execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2653   bitmap_obstack_release (NULL);
2654   mark_functions_to_output ();
2655 
2656   /* When weakref support is missing, we automatically translate all
2657      references to NODE to references to its ultimate alias target.
2658      The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2659      TREE_CHAIN.
2660 
2661      Set up this mapping before we output any assembler but once we are sure
2662      that all symbol renaming is done.
2663 
2664      FIXME: All this ugliness can go away if we just do renaming at the GIMPLE
2665      level by physically rewriting the IL.  At the moment we can only redirect
2666      calls, so we need infrastructure for renaming references as well.  */
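  /* For illustration only: after the loop below, a weakref such as

       static int wfoo (void) __attribute__ ((weakref, alias ("foo")));

     has IDENTIFIER_TRANSPARENT_ALIAS set on its assembler name, with
     TREE_CHAIN pointing at the identifier "foo", so later references are
     emitted directly against "foo".  */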
2667 #ifndef ASM_OUTPUT_WEAKREF
2668   symtab_node *node;
2669 
2670   FOR_EACH_SYMBOL (node)
2671     if (node->alias
2672 	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2673       {
2674 	IDENTIFIER_TRANSPARENT_ALIAS
2675 	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2676 	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2677 	   = (node->alias_target ? node->alias_target
2678 	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2679       }
2680 #endif
2681 
2682   state = EXPANSION;
2683 
2684   /* First output asm statements and anything with ordered output.  The process
2685      flag is cleared for these nodes, so we skip them later.  */
2686   output_in_order ();
2687   expand_all_functions ();
2688   output_variables ();
2689 
2690   process_new_functions ();
2691   state = FINISHED;
2692   output_weakrefs ();
2693 
2694   if (dump_file)
2695     {
2696       fprintf (dump_file, "\nFinal ");
2697       symtab->dump (dump_file);
2698     }
2699   if (!flag_checking)
2700     return;
2701   symtab_node::verify_symtab_nodes ();
2702   /* Double check that all inline clones are gone and that all
2703      function bodies have been released from memory.  */
2704   if (!seen_error ())
2705     {
2706       cgraph_node *node;
2707       bool error_found = false;
2708 
2709       FOR_EACH_DEFINED_FUNCTION (node)
2710 	if (node->global.inlined_to
2711 	    || gimple_has_body_p (node->decl))
2712 	  {
2713 	    error_found = true;
2714 	    node->debug ();
2715 	  }
2716       if (error_found)
2717 	internal_error ("nodes with unreleased memory found");
2718     }
2719 }
2720 
2721 /* Earlydebug dump file, flags, and number.  */
2722 
2723 static int debuginfo_early_dump_nr;
2724 static FILE *debuginfo_early_dump_file;
2725 static dump_flags_t debuginfo_early_dump_flags;
2726 
2727 /* Debug dump file, flags, and number.  */
2728 
2729 static int debuginfo_dump_nr;
2730 static FILE *debuginfo_dump_file;
2731 static dump_flags_t debuginfo_dump_flags;
2732 
2733 /* Register the debug and earlydebug dump files.  */
2734 
2735 void
2736 debuginfo_early_init (void)
2737 {
2738   gcc::dump_manager *dumps = g->get_dumps ();
2739   debuginfo_early_dump_nr = dumps->dump_register (".earlydebug", "earlydebug",
2740 						  "earlydebug", DK_tree,
2741 						  OPTGROUP_NONE,
2742 						  false);
2743   debuginfo_dump_nr = dumps->dump_register (".debug", "debug",
2744 					     "debug", DK_tree,
2745 					     OPTGROUP_NONE,
2746 					     false);
2747 }
2748 
2749 /* Initialize the debug and earlydebug dump files.  */
2750 
2751 void
2752 debuginfo_init (void)
2753 {
2754   gcc::dump_manager *dumps = g->get_dumps ();
2755   debuginfo_dump_file = dump_begin (debuginfo_dump_nr, NULL);
2756   debuginfo_dump_flags = dumps->get_dump_file_info (debuginfo_dump_nr)->pflags;
2757   debuginfo_early_dump_file = dump_begin (debuginfo_early_dump_nr, NULL);
2758   debuginfo_early_dump_flags
2759     = dumps->get_dump_file_info (debuginfo_early_dump_nr)->pflags;
2760 }
2761 
2762 /* Finalize the debug and earlydebug dump files.  */
2763 
2764 void
2765 debuginfo_fini (void)
2766 {
2767   if (debuginfo_dump_file)
2768     dump_end (debuginfo_dump_nr, debuginfo_dump_file);
2769   if (debuginfo_early_dump_file)
2770     dump_end (debuginfo_early_dump_nr, debuginfo_early_dump_file);
2771 }
2772 
2773 /* Set dump_file to the debug dump file.  */
2774 
2775 void
2776 debuginfo_start (void)
2777 {
2778   set_dump_file (debuginfo_dump_file);
2779 }
2780 
2781 /* Undo setting dump_file to the debug dump file.  */
2782 
2783 void
2784 debuginfo_stop (void)
2785 {
2786   set_dump_file (NULL);
2787 }
2788 
2789 /* Set dump_file to the earlydebug dump file.  */
2790 
2791 void
2792 debuginfo_early_start (void)
2793 {
2794   set_dump_file (debuginfo_early_dump_file);
2795 }
2796 
2797 /* Undo setting dump_file to the earlydebug dump file.  */
2798 
2799 void
2800 debuginfo_early_stop (void)
2801 {
2802   set_dump_file (NULL);
2803 }
2804 
2805 /* Analyze the whole compilation unit once it is parsed completely.  */
2806 
2807 void
2808 symbol_table::finalize_compilation_unit (void)
2809 {
2810   timevar_push (TV_CGRAPH);
2811 
2812   /* If we're here there's no current function anymore.  Some frontends
2813      are lazy in clearing these.  */
2814   current_function_decl = NULL;
2815   set_cfun (NULL);
2816 
2817   /* Do not skip analyzing the functions if there were errors; otherwise we
2818      would miss diagnostics for the following functions.  */
2819 
2820   /* Emit size functions we didn't inline.  */
2821   finalize_size_functions ();
2822 
2823   /* Mark alias targets necessary and emit diagnostics.  */
2824   handle_alias_pairs ();
2825 
2826   if (!quiet_flag)
2827     {
2828       fprintf (stderr, "\nAnalyzing compilation unit\n");
2829       fflush (stderr);
2830     }
2831 
2832   if (flag_dump_passes)
2833     dump_passes ();
2834 
2835   /* Gimplify and lower all functions, compute reachability and
2836      remove unreachable nodes.  */
2837   analyze_functions (/*first_time=*/true);
2838 
2839   /* Mark alias targets necessary and emit diagnostics.  */
2840   handle_alias_pairs ();
2841 
2842   /* Gimplify and lower thunks.  */
2843   analyze_functions (/*first_time=*/false);
2844 
2845   /* Offloading requires LTO infrastructure.  */
2846   if (!in_lto_p && g->have_offload)
2847     flag_generate_offload = 1;
2848 
2849   if (!seen_error ())
2850     {
2851       /* Emit early debug for reachable functions, and by consequence,
2852 	 locally scoped symbols.  */
2853       struct cgraph_node *cnode;
2854       FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
2855 	(*debug_hooks->early_global_decl) (cnode->decl);
2856 
2857       /* Clean up anything that needs cleaning up after initial debug
2858 	 generation.  */
2859       debuginfo_early_start ();
2860       (*debug_hooks->early_finish) (main_input_filename);
2861       debuginfo_early_stop ();
2862     }
2863 
2864   /* Finally drive the pass manager.  */
2865   compile ();
2866 
2867   timevar_pop (TV_CGRAPH);
2868 }
2869 
2870 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2871    within the same process.  For use by toplev::finalize.  */
2872 
2873 void
2874 cgraphunit_c_finalize (void)
2875 {
2876   gcc_assert (cgraph_new_nodes.length () == 0);
2877   cgraph_new_nodes.truncate (0);
2878 
2879   vtable_entry_type = NULL;
2880   queued_nodes = &symtab_terminator;
2881 
2882   first_analyzed = NULL;
2883   first_analyzed_var = NULL;
2884 }
2885 
2886 /* Create a wrapper from this cgraph_node to the TARGET node.  A thunk is
2887    used for this kind of wrapper method.  */
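/* For illustration only (not part of GCC): IPA identical code folding may use
   this when it cannot create an alias, turning one of two identical bodies,
   say

     int f (int x) { return x + 1; }
     int g (int x) { return x + 1; }

   into a wrapper so that g ends up as a thunk that simply calls f.  */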
2888 
2889 void
2890 cgraph_node::create_wrapper (cgraph_node *target)
2891 {
2892   /* Preserve DECL_RESULT so we get the right by-reference flag.  */
2893   tree decl_result = DECL_RESULT (decl);
2894 
2895   /* Remove the function's body but keep arguments to be reused
2896      for the thunk.  */
2897   release_body (true);
2898   reset ();
2899 
2900   DECL_UNINLINABLE (decl) = false;
2901   DECL_RESULT (decl) = decl_result;
2902   DECL_INITIAL (decl) = NULL;
2903   allocate_struct_function (decl, false);
2904   set_cfun (NULL);
2905 
2906   /* Turn alias into thunk and expand it into GIMPLE representation.  */
2907   definition = true;
2908 
2909   memset (&thunk, 0, sizeof (cgraph_thunk_info));
2910   thunk.thunk_p = true;
2911   create_edge (target, NULL, count);
2912   callees->can_throw_external = !TREE_NOTHROW (target->decl);
2913 
2914   tree arguments = DECL_ARGUMENTS (decl);
2915 
2916   while (arguments)
2917     {
2918       TREE_ADDRESSABLE (arguments) = false;
2919       arguments = TREE_CHAIN (arguments);
2920     }
2921 
2922   expand_thunk (false, true);
2923 
2924   /* Inline summary set-up.  */
2925   analyze ();
2926   inline_analyze_function (this);
2927 }
2928 
2929 #include "gt-cgraphunit.h"
2930