1 /* Driver of optimization process
2    Copyright (C) 2003-2020 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
/* This module implements the main driver of the compilation process.

   The main purpose of this file is to act as an interface between the
   tree-based front ends and the back end.

   The front end is supposed to use the following functionality:

    - finalize_function

      This function is called once the front end has parsed the whole body of
      a function and it is certain that neither the function body nor the
      declaration will change.

      (There is one exception needed for implementing GCC's extern inline
       functions.)

    - varpool_finalize_decl

      This function has the same behavior as the above but is used for static
      variables.

    - add_asm_node

      Inserts a new toplevel ASM statement.

    - finalize_compilation_unit

      This function is called once the (source-level) compilation unit is
      finalized and it will no longer change.
49 
50       The symbol table is constructed starting from the trivially needed
51       symbols finalized by the frontend.  Functions are lowered into
52       GIMPLE representation and callgraph/reference lists are constructed.
53       Those are used to discover other necessary functions and variables.
54 
55       At the end the bodies of unreachable functions are removed.
56 
57       The function can be called multiple times when multiple source level
58       compilation units are combined.
59 
60     - compile
61 
      This passes control to the back end.  Optimizations are performed and
      the final assembler output is generated.  This is done in the following
      way.  Note that with link-time optimization the process is split into
      three stages (compile time, link-time analysis and parallel link time,
      as indicated below).
67 
68       Compile time:
69 
70 	1) Inter-procedural optimization.
71 	   (ipa_passes)
72 
73 	   This part is further split into:
74 
75 	   a) early optimizations. These are local passes executed in
76 	      the topological order on the callgraph.
77 
	      The purpose of early optimizations is to optimize away simple
	      things that may otherwise confuse IP analysis.  Very simple
	      propagation across the callgraph is done, e.g. to discover
	      functions without side effects, and simple inlining is performed.

	   b) early small interprocedural passes.

	      These are interprocedural passes executed only at compilation
	      time.  They include, for example, transactional memory lowering,
	      unreachable code removal and other simple transformations.

	   c) IP analysis stage.  All interprocedural passes do their
	      analysis.

	      Interprocedural passes differ from small interprocedural
	      passes by their ability to operate across the whole program
	      at link time.  Their analysis stage is performed early to
	      reduce both link times and link-time memory usage by
	      not having to represent the whole program in memory.
97 
98 	   d) LTO streaming.  When doing LTO, everything important gets
99 	      streamed into the object file.
100 
       Compile time and/or link-time analysis stage (WPA):

	      At link time the units get streamed back and the symbol table
	      is merged.  Function bodies are not streamed in and are not
	      available.
106 	   e) IP propagation stage.  All IP passes execute their
107 	      IP propagation. This is done based on the earlier analysis
108 	      without having function bodies at hand.
109 	   f) Ltrans streaming.  When doing WHOPR LTO, the program
110 	      is partitioned and streamed into multiple object files.
111 
       Compile time and/or parallel link-time stage (ltrans):

	      Each of the object files is streamed back and compiled
	      separately.  Now the function bodies become available
	      again.
117 
118 	 2) Virtual clone materialization
119 	    (cgraph_materialize_clone)
120 
	    IP passes can produce copies of existing functions (such
	    as versioned clones or inline clones) without actually
	    manipulating their bodies, by creating virtual clones in
	    the callgraph.  At this time the virtual clones are
	    turned into real functions.

	 3) IP transformation

	    All IP passes transform function bodies based on the earlier
	    decisions of the IP propagation.
130 
131 	 4) late small IP passes
132 
	    Simple IP passes working within a single program partition.

	 5) Expansion
	    (expand_all_functions)

	    At this stage the functions that need to be output into the
	    assembler file are identified and compiled in topological order.

	 6) Output of variables and aliases

	    Now it is known which variable references were not optimized
	    out, and thus the variables are output to the file.

	    Note that with -fno-toplevel-reorder steps 5 and 6
	    are combined in cgraph_output_in_order.
146 
   Finally there are functions to manipulate the callgraph from
   the back end.

    - cgraph_add_new_function is used to add back-end-produced
      functions introduced after the unit is finalized.
      The functions are enqueued for later processing and inserted
      into the callgraph with cgraph_process_new_functions.

    - cgraph_function_versioning

      produces a copy of a function (a new version)
      and applies simple transformations to it.
*/
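
/* For illustration only, a schematic sketch (not compilable on its own) of
   how a front end typically drives the interface described above, where
   FNDECL and VARDECL stand for declarations the front end has built:

     For each function body it has finished parsing:
       cgraph_node::finalize_function (fndecl, false);

     For each static variable:
       varpool_node::finalize_decl (vardecl);

     Once the whole translation unit has been parsed:
       symtab->finalize_compilation_unit ();

   finalize_compilation_unit in turn hands control to the back end
   (the "compile" step above).  */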
159 
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h"     /* FIXME: For reg_obstack.  */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "lto-section-names.h"
206 #include "stringpool.h"
207 #include "attribs.h"
208 #include "ipa-inline.h"
209 
210 /* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
211    secondary queue used during optimization to accommodate passes that
212    may generate new functions that need to be optimized and expanded.  */
213 vec<cgraph_node *> cgraph_new_nodes;
214 
215 static void expand_all_functions (void);
216 static void mark_functions_to_output (void);
217 static void handle_alias_pairs (void);
218 
219 /* Used for vtable lookup in thunk adjusting.  */
220 static GTY (()) tree vtable_entry_type;
221 
222 /* Return true if this symbol is a function from the C frontend specified
223    directly in RTL form (with "__RTL").  */
224 
225 bool
symtab_node::native_rtl_p () const
227 {
228   if (TREE_CODE (decl) != FUNCTION_DECL)
229     return false;
230   if (!DECL_STRUCT_FUNCTION (decl))
231     return false;
232   return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
233 }
234 
/* Determine whether the symbol declaration is needed.  That is, whether it
   is visible to something either outside this translation unit or to
   something magic in the system configury.  */
238 bool
symtab_node::needed_p (void)
240 {
241   /* Double check that no one output the function into assembly file
242      early.  */
243   if (!native_rtl_p ())
244       gcc_checking_assert
245 	(!DECL_ASSEMBLER_NAME_SET_P (decl)
246 	 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
247 
248   if (!definition)
249     return false;
250 
251   if (DECL_EXTERNAL (decl))
252     return false;
253 
254   /* If the user told us it is used, then it must be so.  */
255   if (force_output)
256     return true;
257 
258   /* ABI forced symbols are needed when they are external.  */
259   if (forced_by_abi && TREE_PUBLIC (decl))
260     return true;
261 
262   /* Keep constructors, destructors and virtual functions.  */
263    if (TREE_CODE (decl) == FUNCTION_DECL
264        && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
265     return true;
266 
267   /* Externally visible variables must be output.  The exception is
268      COMDAT variables that must be output only when they are needed.  */
269   if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
270     return true;
271 
272   return false;
273 }
274 
275 /* Head and terminator of the queue of nodes to be processed while building
276    callgraph.  */
277 
278 static symtab_node symtab_terminator (SYMTAB_SYMBOL);
279 static symtab_node *queued_nodes = &symtab_terminator;
280 
/* Add NODE to the queue starting at QUEUED_NODES.
   The queue is linked via AUX pointers and terminated by a pointer to
   SYMTAB_TERMINATOR.  */
283 
284 static void
enqueue_node (symtab_node *node)
286 {
287   if (node->aux)
288     return;
289   gcc_checking_assert (queued_nodes);
290   node->aux = queued_nodes;
291   queued_nodes = node;
292 }
293 
/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
   these functions into the callgraph so that they look like ordinary
   reachable functions inserted into the callgraph already at construction
   time.  */
297 
298 void
symbol_table::process_new_functions (void)
300 {
301   tree fndecl;
302 
303   if (!cgraph_new_nodes.exists ())
304     return;
305 
306   handle_alias_pairs ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
309   for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
310     {
311       cgraph_node *node = cgraph_new_nodes[i];
312       fndecl = node->decl;
313       switch (state)
314 	{
315 	case CONSTRUCTION:
	  /* At construction time we just need to finalize the function and
	     move it into the reachable functions list.  */
318 
319 	  cgraph_node::finalize_function (fndecl, false);
320 	  call_cgraph_insertion_hooks (node);
321 	  enqueue_node (node);
322 	  break;
323 
324 	case IPA:
325 	case IPA_SSA:
326 	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization has already started, do all essential
	     transformations that have already been performed on the whole
	     cgraph but not on this function.  */
330 
331 	  gimple_register_cfg_hooks ();
332 	  if (!node->analyzed)
333 	    node->analyze ();
334 	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
335 	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
336 	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
337 	    {
	      bool summaries_computed = ipa_fn_summaries != NULL;
339 	      g->get_passes ()->execute_early_local_passes ();
340 	      /* Early passes compute inline parameters to do inlining
341 		 and splitting.  This is redundant for functions added late.
342 		 Just throw away whatever it did.  */
	      if (!summaries_computed)
344 		{
345 		  ipa_free_fn_summary ();
346 		  ipa_free_size_summary ();
347 		}
348 	    }
349 	  else if (ipa_fn_summaries != NULL)
350 	    compute_fn_summary (node, true);
351 	  free_dominance_info (CDI_POST_DOMINATORS);
352 	  free_dominance_info (CDI_DOMINATORS);
353 	  pop_cfun ();
354 	  call_cgraph_insertion_hooks (node);
355 	  break;
356 
357 	case EXPANSION:
358 	  /* Functions created during expansion shall be compiled
359 	     directly.  */
360 	  node->process = 0;
361 	  call_cgraph_insertion_hooks (node);
362 	  node->expand ();
363 	  break;
364 
365 	default:
366 	  gcc_unreachable ();
367 	  break;
368 	}
369     }
370 
371   cgraph_new_nodes.release ();
372 }
373 
/* As a GCC extension we allow redefinition of the function.  The
   semantics when the two bodies differ are not well defined.
   We replace the old body with the new body, so in unit-at-a-time mode
   we always use the new body, while in normal mode we may end up with
   the old body inlined into some functions and the new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function, but this is difficult to do.  */
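
/* For example (an illustrative sketch, using GNU89/gnu_inline semantics),
   the front end may legitimately see two bodies for the same function:

     extern inline int f (void) { return 1; }
     int f (void) { return 2; }

   The second definition triggers the reset below.  */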
383 
384 void
cgraph_node::reset (void)
386 {
387   /* If process is set, then we have already begun whole-unit analysis.
388      This is *not* testing for whether we've already emitted the function.
389      That case can be sort-of legitimately seen with real function redefinition
390      errors.  I would argue that the front end should never present us with
391      such a case, but don't enforce that for now.  */
392   gcc_assert (!process);
393 
394   /* Reset our data structures so we can analyze the function again.  */
395   inlined_to = NULL;
396   memset (&rtl, 0, sizeof (rtl));
397   analyzed = false;
398   definition = false;
399   alias = false;
400   transparent_alias = false;
401   weakref = false;
402   cpp_implicit_alias = false;
403 
404   remove_callees ();
405   remove_all_references ();
406 }
407 
408 /* Return true when there are references to the node.  INCLUDE_SELF is
409    true if a self reference counts as a reference.  */
410 
411 bool
symtab_node::referred_to_p (bool include_self)
413 {
414   ipa_ref *ref = NULL;
415 
416   /* See if there are any references at all.  */
417   if (iterate_referring (0, ref))
418     return true;
419   /* For functions check also calls.  */
420   cgraph_node *cn = dyn_cast <cgraph_node *> (this);
421   if (cn && cn->callers)
422     {
423       if (include_self)
424 	return true;
425       for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
426 	if (e->caller != this)
427 	  return true;
428     }
429   return false;
430 }
431 
432 /* DECL has been parsed.  Take it, queue it, compile it at the whim of the
433    logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
434    the garbage collector run at the moment.  We would need to either create
435    a new GC context, or just not compile right now.  */
436 
437 void
cgraph_node::finalize_function (tree decl, bool no_collect)
439 {
440   cgraph_node *node = cgraph_node::get_create (decl);
441 
442   if (node->definition)
443     {
444       /* Nested functions should only be defined once.  */
445       gcc_assert (!DECL_CONTEXT (decl)
446 		  || TREE_CODE (DECL_CONTEXT (decl)) !=	FUNCTION_DECL);
447       node->reset ();
448       node->redefined_extern_inline = true;
449     }
450 
451   /* Set definition first before calling notice_global_symbol so that
452      it is available to notice_global_symbol.  */
453   node->definition = true;
454   notice_global_symbol (decl);
455   node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
456   if (!flag_toplevel_reorder)
457     node->no_reorder = true;
458 
459   /* With -fkeep-inline-functions we are keeping all inline functions except
460      for extern inline ones.  */
461   if (flag_keep_inline_functions
462       && DECL_DECLARED_INLINE_P (decl)
463       && !DECL_EXTERNAL (decl)
464       && !DECL_DISREGARD_INLINE_LIMITS (decl))
465     node->force_output = 1;
466 
467   /* __RTL functions were already output as soon as they were parsed (due
468      to the large amount of global state in the backend).
469      Mark such functions as "force_output" to reflect the fact that they
470      will be in the asm file when considering the symbols they reference.
471      The attempt to output them later on will bail out immediately.  */
472   if (node->native_rtl_p ())
473     node->force_output = 1;
474 
  /* When not optimizing, also output the static functions (see PR24561),
     but don't do so for always_inline functions, functions declared inline,
     and nested functions.  These were optimized out in the original
     implementation and it is unclear whether we want to change the behavior
     here.  */
480   if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
481 	|| node->no_reorder)
482        && !node->cpp_implicit_alias
483        && !DECL_DISREGARD_INLINE_LIMITS (decl)
484        && !DECL_DECLARED_INLINE_P (decl)
485        && !(DECL_CONTEXT (decl)
486 	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
487       && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
488     node->force_output = 1;
489 
490   /* If we've not yet emitted decl, tell the debug info about it.  */
491   if (!TREE_ASM_WRITTEN (decl))
492     (*debug_hooks->deferred_inline_function) (decl);
493 
494   if (!no_collect)
495     ggc_collect ();
496 
497   if (symtab->state == CONSTRUCTION
498       && (node->needed_p () || node->referred_to_p ()))
499     enqueue_node (node);
500 }
501 
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by the middle end and allows insertion of a new function at an arbitrary
   point of compilation.  The function can be in high, low or SSA form
   of GIMPLE.

   The function is assumed to be reachable and to have its address taken (so
   no API-breaking optimizations are performed on it).

   The main work done by this function is to enqueue the function for later
   processing, so that the passes do not need to be re-entrant.  */
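
/* A minimal usage sketch (illustrative): a pass that has just built a new
   function FNDECL with a gimplified and lowered body would call

     cgraph_node::add_new_function (fndecl, true);

   and leave further processing to symbol_table::process_new_functions.  */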
513 
514 void
cgraph_node::add_new_function (tree fndecl, bool lowered)
516 {
517   gcc::pass_manager *passes = g->get_passes ();
518   cgraph_node *node;
519 
520   if (dump_file)
521     {
522       struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
523       const char *function_type = ((gimple_has_body_p (fndecl))
524 				   ? (lowered
525 				      ? (gimple_in_ssa_p (fn)
526 					 ? "ssa gimple"
527 					 : "low gimple")
528 				      : "high gimple")
529 				   : "to-be-gimplified");
530       fprintf (dump_file,
531 	       "Added new %s function %s to callgraph\n",
532 	       function_type,
533 	       fndecl_name (fndecl));
534     }
535 
536   switch (symtab->state)
537     {
538       case PARSING:
539 	cgraph_node::finalize_function (fndecl, false);
540 	break;
541       case CONSTRUCTION:
542 	/* Just enqueue function to be processed at nearest occurrence.  */
543 	node = cgraph_node::get_create (fndecl);
544 	if (lowered)
545 	  node->lowered = true;
546 	cgraph_new_nodes.safe_push (node);
547         break;
548 
549       case IPA:
550       case IPA_SSA:
551       case IPA_SSA_AFTER_INLINING:
552       case EXPANSION:
	/* Bring the function into finalized state and enqueue it for later
	   analysis and compilation.  */
555 	node = cgraph_node::get_create (fndecl);
556 	node->local = false;
557 	node->definition = true;
558 	node->force_output = true;
559 	if (TREE_PUBLIC (fndecl))
560 	  node->externally_visible = true;
561 	if (!lowered && symtab->state == EXPANSION)
562 	  {
563 	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
564 	    gimple_register_cfg_hooks ();
565 	    bitmap_obstack_initialize (NULL);
566 	    execute_pass_list (cfun, passes->all_lowering_passes);
567 	    passes->execute_early_local_passes ();
568 	    bitmap_obstack_release (NULL);
569 	    pop_cfun ();
570 
571 	    lowered = true;
572 	  }
573 	if (lowered)
574 	  node->lowered = true;
575 	cgraph_new_nodes.safe_push (node);
576         break;
577 
578       case FINISHED:
579 	/* At the very end of compilation we have to do all the work up
580 	   to expansion.  */
581 	node = cgraph_node::create (fndecl);
582 	if (lowered)
583 	  node->lowered = true;
584 	node->definition = true;
585 	node->analyze ();
586 	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
587 	gimple_register_cfg_hooks ();
588 	bitmap_obstack_initialize (NULL);
589 	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
590 	  g->get_passes ()->execute_early_local_passes ();
591 	bitmap_obstack_release (NULL);
592 	pop_cfun ();
593 	node->expand ();
594 	break;
595 
596       default:
597 	gcc_unreachable ();
598     }
599 
600   /* Set a personality if required and we already passed EH lowering.  */
601   if (lowered
602       && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
603 	  == eh_personality_lang))
604     DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
605 }
606 
607 /* Analyze the function scheduled to be output.  */
608 void
cgraph_node::analyze (void)
610 {
611   if (native_rtl_p ())
612     {
613       analyzed = true;
614       return;
615     }
616 
617   tree decl = this->decl;
618   location_t saved_loc = input_location;
619   input_location = DECL_SOURCE_LOCATION (decl);
620 
621   if (thunk.thunk_p)
622     {
623       cgraph_node *t = cgraph_node::get (thunk.alias);
624 
625       create_edge (t, NULL, t->count);
626       callees->can_throw_external = !TREE_NOTHROW (t->decl);
627       /* Target code in expand_thunk may need the thunk's target
628 	 to be analyzed, so recurse here.  */
629       if (!t->analyzed && t->definition)
630 	t->analyze ();
631       if (t->alias)
632 	{
633 	  t = t->get_alias_target ();
634 	  if (!t->analyzed && t->definition)
635 	    t->analyze ();
636 	}
637       bool ret = expand_thunk (false, false);
638       thunk.alias = NULL;
639       if (!ret)
640 	return;
641     }
642   if (alias)
643     resolve_alias (cgraph_node::get (alias_target), transparent_alias);
644   else if (dispatcher_function)
645     {
646       /* Generate the dispatcher body of multi-versioned functions.  */
647       cgraph_function_version_info *dispatcher_version_info
648 	= function_version ();
649       if (dispatcher_version_info != NULL
650           && (dispatcher_version_info->dispatcher_resolver
651 	      == NULL_TREE))
652 	{
653 	  tree resolver = NULL_TREE;
654 	  gcc_assert (targetm.generate_version_dispatcher_body);
655 	  resolver = targetm.generate_version_dispatcher_body (this);
656 	  gcc_assert (resolver != NULL_TREE);
657 	}
658     }
659   else
660     {
661       push_cfun (DECL_STRUCT_FUNCTION (decl));
662 
663       assign_assembler_name_if_needed (decl);
664 
      /* Make sure to gimplify bodies only once.  While analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
669       if (!gimple_has_body_p (decl))
670 	gimplify_function_tree (decl);
671 
672       /* Lower the function.  */
673       if (!lowered)
674 	{
675 	  if (nested)
676 	    lower_nested_functions (decl);
677 	  gcc_assert (!nested);
678 
679 	  gimple_register_cfg_hooks ();
680 	  bitmap_obstack_initialize (NULL);
681 	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
682 	  free_dominance_info (CDI_POST_DOMINATORS);
683 	  free_dominance_info (CDI_DOMINATORS);
684 	  compact_blocks ();
685 	  bitmap_obstack_release (NULL);
686 	  lowered = true;
687 	}
688 
689       pop_cfun ();
690     }
691   analyzed = true;
692 
693   input_location = saved_loc;
694 }
695 
/* The C++ frontend produces same-body aliases all over the place, even before
   PCH gets streamed out.  It relies on us linking the aliases with their
   functions in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, and once the C++ FE is
   sure it won't stream PCH we build the links via this function.  */
701 
702 void
symbol_table::process_same_body_aliases (void)
704 {
705   symtab_node *node;
706   FOR_EACH_SYMBOL (node)
707     if (node->cpp_implicit_alias && !node->analyzed)
708       node->resolve_alias
709 	(VAR_P (node->alias_target)
710 	 ? (symtab_node *)varpool_node::get_create (node->alias_target)
711 	 : (symtab_node *)cgraph_node::get_create (node->alias_target));
712   cpp_implicit_aliases_done = true;
713 }
714 
715 /* Process a symver attribute.  */
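
/* For reference, an illustrative example of the source-level form of the
   attribute handled here:

     __attribute__ ((symver ("foo@VERS_1")))
     int foo_v1 (void) { return 1; }

   which binds the definition to the symbol version foo@VERS_1.  */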
716 
717 static void
process_symver_attribute (symtab_node *n)
719 {
720   tree value = lookup_attribute ("symver", DECL_ATTRIBUTES (n->decl));
721 
722   if (!value)
723     return;
724   if (lookup_attribute ("symver", TREE_CHAIN (value)))
725     {
726       error_at (DECL_SOURCE_LOCATION (n->decl),
727 		"multiple versions for one symbol");
728       return;
729     }
730   tree symver = get_identifier_with_length
731 		  (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (value))),
732 		   TREE_STRING_LENGTH (TREE_VALUE (TREE_VALUE (value))));
733   symtab_node *def = symtab_node::get_for_asmname (symver);
734 
735   if (def)
736     {
737       error_at (DECL_SOURCE_LOCATION (n->decl),
738 		"duplicate definition of a symbol version");
739       inform (DECL_SOURCE_LOCATION (def->decl),
740 	      "same version was previously defined here");
741       return;
742     }
743   if (!n->definition)
744     {
745       error_at (DECL_SOURCE_LOCATION (n->decl),
746 		"symbol needs to be defined to have a version");
747       return;
748     }
749   if (DECL_COMMON (n->decl))
750     {
751       error_at (DECL_SOURCE_LOCATION (n->decl),
752 		"common symbol cannot be versioned");
753       return;
754     }
755   if (DECL_COMDAT (n->decl))
756     {
757       error_at (DECL_SOURCE_LOCATION (n->decl),
758 		"comdat symbol cannot be versioned");
759       return;
760     }
761   if (n->weakref)
762     {
763       error_at (DECL_SOURCE_LOCATION (n->decl),
764 		"weakref cannot be versioned");
765       return;
766     }
767   if (!TREE_PUBLIC (n->decl))
768     {
769       error_at (DECL_SOURCE_LOCATION (n->decl),
770 		"versioned symbol must be public");
771       return;
772     }
773   if (DECL_VISIBILITY (n->decl) != VISIBILITY_DEFAULT)
774     {
775       error_at (DECL_SOURCE_LOCATION (n->decl),
776 		"versioned symbol must have default visibility");
777       return;
778     }
779 
780   /* Create new symbol table entry representing the version.  */
781   tree new_decl = copy_node (n->decl);
782 
783   DECL_INITIAL (new_decl) = NULL_TREE;
784   if (TREE_CODE (new_decl) == FUNCTION_DECL)
785     DECL_STRUCT_FUNCTION (new_decl) = NULL;
786   SET_DECL_ASSEMBLER_NAME (new_decl, symver);
787   TREE_PUBLIC (new_decl) = 1;
788   DECL_ATTRIBUTES (new_decl) = NULL;
789 
790   symtab_node *symver_node = symtab_node::get_create (new_decl);
791   symver_node->alias = true;
792   symver_node->definition = true;
793   symver_node->symver = true;
794   symver_node->create_reference (n, IPA_REF_ALIAS, NULL);
795   symver_node->analyzed = true;
796 }
797 
798 /* Process attributes common for vars and functions.  */
799 
800 static void
process_common_attributes (symtab_node *node, tree decl)
802 {
803   tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
804 
805   if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
806     {
807       warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
808 		  "%<weakref%> attribute should be accompanied with"
809 		  " an %<alias%> attribute");
810       DECL_WEAK (decl) = 0;
811       DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
812 						 DECL_ATTRIBUTES (decl));
813     }
814 
815   if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
816     node->no_reorder = 1;
817   process_symver_attribute (node);
818 }
819 
820 /* Look for externally_visible and used attributes and mark cgraph nodes
821    accordingly.
822 
823    We cannot mark the nodes at the point the attributes are processed (in
824    handle_*_attribute) because the copy of the declarations available at that
825    point may not be canonical.  For example, in:
826 
827     void f();
828     void f() __attribute__((used));
829 
830    the declaration we see in handle_used_attribute will be the second
831    declaration -- but the front end will subsequently merge that declaration
832    with the original declaration and discard the second declaration.
833 
834    Furthermore, we can't mark these nodes in finalize_function because:
835 
836     void f() {}
837     void f() __attribute__((externally_visible));
838 
839    is valid.
840 
841    So, we walk the nodes at the end of the translation unit, applying the
842    attributes at that point.  */
843 
844 static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
847 {
848   cgraph_node *node;
849   varpool_node *vnode;
850 
851   for (node = symtab->first_function (); node != first;
852        node = symtab->next_function (node))
853     {
854       tree decl = node->decl;
855 
856       if (node->alias
857 	  && lookup_attribute ("flatten", DECL_ATTRIBUTES (decl)))
858 	{
859 	  tree tdecl = node->get_alias_target_tree ();
860 	  if (!tdecl || !DECL_P (tdecl)
861 	      || !lookup_attribute ("flatten", DECL_ATTRIBUTES (tdecl)))
862 	    warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
863 			"%<flatten%> attribute is ignored on aliases");
864 	}
865       if (DECL_PRESERVE_P (decl))
866 	node->mark_force_output ();
867       else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
868 	{
869 	  if (! TREE_PUBLIC (node->decl))
870 	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
871 			"%<externally_visible%>"
			" attribute has effect only on public objects");
873 	}
874       if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
875 	  && node->definition
876 	  && (!node->alias || DECL_INITIAL (decl) != error_mark_node))
877 	{
878 	  /* NODE->DEFINITION && NODE->ALIAS is nonzero for valid weakref
879 	     function declarations; DECL_INITIAL is non-null for invalid
880 	     weakref functions that are also defined.  */
881 	  warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
882 		      "%<weakref%> attribute ignored"
883 		      " because function is defined");
884 	  DECL_WEAK (decl) = 0;
885 	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
886 						     DECL_ATTRIBUTES (decl));
887 	  DECL_ATTRIBUTES (decl) = remove_attribute ("alias",
888 						     DECL_ATTRIBUTES (decl));
889 	  node->alias = false;
890 	  node->weakref = false;
891 	  node->transparent_alias = false;
892 	}
893       else if (lookup_attribute ("alias", DECL_ATTRIBUTES (decl))
894 	  && node->definition
895 	  && !node->alias)
896 	warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
897 		    "%<alias%> attribute ignored"
898 		    " because function is defined");
899 
900       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
901 	  && !DECL_DECLARED_INLINE_P (decl)
902 	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
903 	  && !DECL_UNINLINABLE (decl))
904 	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
905 		    "%<always_inline%> function might not be inlinable");
906 
907       process_common_attributes (node, decl);
908     }
909   for (vnode = symtab->first_variable (); vnode != first_var;
910        vnode = symtab->next_variable (vnode))
911     {
912       tree decl = vnode->decl;
913       if (DECL_EXTERNAL (decl)
914 	  && DECL_INITIAL (decl))
915 	varpool_node::finalize_decl (decl);
916       if (DECL_PRESERVE_P (decl))
917 	vnode->force_output = true;
918       else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
919 	{
920 	  if (! TREE_PUBLIC (vnode->decl))
921 	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
922 			"%<externally_visible%>"
			" attribute has effect only on public objects");
924 	}
925       if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
926 	  && vnode->definition
927 	  && DECL_INITIAL (decl))
928 	{
929 	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
930 		      "%<weakref%> attribute ignored"
931 		      " because variable is initialized");
932 	  DECL_WEAK (decl) = 0;
933 	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
934 						      DECL_ATTRIBUTES (decl));
935 	}
936       process_common_attributes (vnode, decl);
937     }
938 }
939 
/* Mark DECL as finalized.  By finalizing the declaration, the frontend
   instructs the middle end to output the variable to the asm file if it is
   needed or externally visible.  */
943 
944 void
varpool_node::finalize_decl (tree decl)
946 {
947   varpool_node *node = varpool_node::get_create (decl);
948 
949   gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
950 
951   if (node->definition)
952     return;
953   /* Set definition first before calling notice_global_symbol so that
954      it is available to notice_global_symbol.  */
955   node->definition = true;
956   notice_global_symbol (decl);
957   if (!flag_toplevel_reorder)
958     node->no_reorder = true;
959   if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
960       /* Traditionally we do not eliminate static variables when not
961 	 optimizing and when not doing toplevel reorder.  */
962       || (node->no_reorder && !DECL_COMDAT (node->decl)
963 	  && !DECL_ARTIFICIAL (node->decl)))
964     node->force_output = true;
965 
966   if (symtab->state == CONSTRUCTION
967       && (node->needed_p () || node->referred_to_p ()))
968     enqueue_node (node);
969   if (symtab->state >= IPA_SSA)
970     node->analyze ();
971   /* Some frontends produce various interface variables after compilation
972      finished.  */
973   if (symtab->state == FINISHED
974       || (node->no_reorder
975 	  && symtab->state == EXPANSION))
976     node->assemble_decl ();
977 }
978 
/* EDGE is a polymorphic call.  Mark all possible targets as reachable,
   and if there is only one target, perform trivial devirtualization.
981    REACHABLE_CALL_TARGETS collects target lists we already walked to
982    avoid duplicate work.  */
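
/* For example (illustrative), given

     struct S final { virtual int f () { return 1; } };
     int g (S *p) { return p->f (); }

   the call p->f () has S::f as its only possible target, so the indirect
   edge can be made direct.  */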
983 
984 static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
987 {
988   unsigned int i;
989   void *cache_token;
990   bool final;
991   vec <cgraph_node *>targets
992     = possible_polymorphic_call_targets
993 	(edge, &final, &cache_token);
994 
995   if (!reachable_call_targets->add (cache_token))
996     {
997       if (symtab->dump_file)
998 	dump_possible_polymorphic_call_targets
999 	  (symtab->dump_file, edge);
1000 
1001       for (i = 0; i < targets.length (); i++)
1002 	{
1003 	  /* Do not bother to mark virtual methods in anonymous namespace;
1004 	     either we will find use of virtual table defining it, or it is
1005 	     unused.  */
1006 	  if (targets[i]->definition
1007 	      && TREE_CODE
1008 		  (TREE_TYPE (targets[i]->decl))
1009 		   == METHOD_TYPE
1010 	      && !type_in_anonymous_namespace_p
1011 		   (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
1012 	    enqueue_node (targets[i]);
1013 	}
1014     }
1015 
1016   /* Very trivial devirtualization; when the type is
1017      final or anonymous (so we know all its derivation)
1018      and there is only one possible virtual call target,
1019      make the edge direct.  */
1020   if (final)
1021     {
1022       if (targets.length () <= 1 && dbg_cnt (devirt))
1023 	{
1024 	  cgraph_node *target;
1025 	  if (targets.length () == 1)
1026 	    target = targets[0];
1027 	  else
1028 	    target = cgraph_node::create
1029 			(builtin_decl_implicit (BUILT_IN_UNREACHABLE));
1030 
1031 	  if (symtab->dump_file)
1032 	    {
1033 	      fprintf (symtab->dump_file,
1034 		       "Devirtualizing call: ");
1035 	      print_gimple_stmt (symtab->dump_file,
1036 				 edge->call_stmt, 0,
1037 				 TDF_SLIM);
1038 	    }
1039           if (dump_enabled_p ())
1040             {
1041 	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
1042 			       "devirtualizing call in %s to %s\n",
1043 			       edge->caller->dump_name (),
1044 			       target->dump_name ());
1045 	    }
1046 
1047 	  edge = cgraph_edge::make_direct (edge, target);
1048 	  gimple *new_call = cgraph_edge::redirect_call_stmt_to_callee (edge);
1049 
1050 	  if (symtab->dump_file)
1051 	    {
1052 	      fprintf (symtab->dump_file, "Devirtualized as: ");
1053 	      print_gimple_stmt (symtab->dump_file, new_call, 0, TDF_SLIM);
1054 	    }
1055 	}
1056     }
1057 }
1058 
1059 /* Issue appropriate warnings for the global declaration DECL.  */
1060 
1061 static void
check_global_declaration (symtab_node *snode)
1063 {
1064   const char *decl_file;
1065   tree decl = snode->decl;
1066 
1067   /* Warn about any function declared static but not defined.  We don't
1068      warn about variables, because many programs have static variables
1069      that exist only to get some text into the object file.  */
1070   if (TREE_CODE (decl) == FUNCTION_DECL
1071       && DECL_INITIAL (decl) == 0
1072       && DECL_EXTERNAL (decl)
1073       && ! DECL_ARTIFICIAL (decl)
1074       && ! TREE_PUBLIC (decl))
1075     {
1076       if (TREE_NO_WARNING (decl))
1077 	;
1078       else if (snode->referred_to_p (/*include_self=*/false))
1079 	pedwarn (input_location, 0, "%q+F used but never defined", decl);
1080       else
1081 	warning (OPT_Wunused_function, "%q+F declared %<static%> but never "
1082 				       "defined", decl);
1083       /* This symbol is effectively an "extern" declaration now.  */
1084       TREE_PUBLIC (decl) = 1;
1085     }
1086 
1087   /* Warn about static fns or vars defined but not used.  */
1088   if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
1089        || (((warn_unused_variable && ! TREE_READONLY (decl))
1090 	    || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
1091 		&& (warn_unused_const_variable == 2
1092 		    || (main_input_filename != NULL
1093 			&& (decl_file = DECL_SOURCE_FILE (decl)) != NULL
1094 			&& filename_cmp (main_input_filename,
1095 					 decl_file) == 0))))
1096 	   && VAR_P (decl)))
1097       && ! DECL_IN_SYSTEM_HEADER (decl)
1098       && ! snode->referred_to_p (/*include_self=*/false)
1099       /* This TREE_USED check is needed in addition to referred_to_p
1100 	 above, because the `__unused__' attribute is not being
1101 	 considered for referred_to_p.  */
1102       && ! TREE_USED (decl)
1103       /* The TREE_USED bit for file-scope decls is kept in the identifier,
1104 	 to handle multiple external decls in different scopes.  */
1105       && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
1106       && ! DECL_EXTERNAL (decl)
1107       && ! DECL_ARTIFICIAL (decl)
1108       && ! DECL_ABSTRACT_ORIGIN (decl)
1109       && ! TREE_PUBLIC (decl)
1110       /* A volatile variable might be used in some non-obvious way.  */
1111       && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
1112       /* Global register variables must be declared to reserve them.  */
1113       && ! (VAR_P (decl) && DECL_REGISTER (decl))
1114       /* Global ctors and dtors are called by the runtime.  */
1115       && (TREE_CODE (decl) != FUNCTION_DECL
1116 	  || (!DECL_STATIC_CONSTRUCTOR (decl)
1117 	      && !DECL_STATIC_DESTRUCTOR (decl)))
1118       /* Otherwise, ask the language.  */
1119       && lang_hooks.decls.warn_unused_global (decl))
1120     warning_at (DECL_SOURCE_LOCATION (decl),
1121 		(TREE_CODE (decl) == FUNCTION_DECL)
1122 		? OPT_Wunused_function
1123 		: (TREE_READONLY (decl)
1124 		   ? OPT_Wunused_const_variable_
1125 		   : OPT_Wunused_variable),
1126 		"%qD defined but not used", decl);
1127 }
1128 
/* Discover all functions and variables that are trivially needed, and analyze
   them as well as all functions and variables referred to by them.  */
1131 static cgraph_node *first_analyzed;
1132 static varpool_node *first_analyzed_var;
1133 
/* FIRST_TIME is set to TRUE the first time we are called for a
   translation unit from finalize_compilation_unit (), and to FALSE
   otherwise.  */
1137 
1138 static void
analyze_functions (bool first_time)
1140 {
1141   /* Keep track of already processed nodes when called multiple times for
1142      intermodule optimization.  */
1143   cgraph_node *first_handled = first_analyzed;
1144   varpool_node *first_handled_var = first_analyzed_var;
1145   hash_set<void *> reachable_call_targets;
1146 
1147   symtab_node *node;
1148   symtab_node *next;
1149   int i;
1150   ipa_ref *ref;
1151   bool changed = true;
1152   location_t saved_loc = input_location;
1153 
1154   bitmap_obstack_initialize (NULL);
1155   symtab->state = CONSTRUCTION;
1156   input_location = UNKNOWN_LOCATION;
1157 
  /* Ugly, but the fixup cannot happen at the time the same-body alias is
     created; the C++ FE is confused about the COMDAT groups being right.  */
1160   if (symtab->cpp_implicit_aliases_done)
1161     FOR_EACH_SYMBOL (node)
1162       if (node->cpp_implicit_alias)
1163 	  node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1164   build_type_inheritance_graph ();
1165 
  /* Analysis adds static variables that in turn add references to new
     functions.  So we need to iterate the process until it stabilizes.  */
1168   while (changed)
1169     {
1170       changed = false;
1171       process_function_and_variable_attributes (first_analyzed,
1172 						first_analyzed_var);
1173 
1174       /* First identify the trivially needed symbols.  */
1175       for (node = symtab->first_symbol ();
1176 	   node != first_analyzed
1177 	   && node != first_analyzed_var; node = node->next)
1178 	{
1179 	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
1180 	  node->get_comdat_group_id ();
1181 	  if (node->needed_p ())
1182 	    {
1183 	      enqueue_node (node);
1184 	      if (!changed && symtab->dump_file)
1185 		fprintf (symtab->dump_file, "Trivially needed symbols:");
1186 	      changed = true;
1187 	      if (symtab->dump_file)
1188 		fprintf (symtab->dump_file, " %s", node->dump_asm_name ());
1189 	      if (!changed && symtab->dump_file)
1190 		fprintf (symtab->dump_file, "\n");
1191 	    }
1192 	  if (node == first_analyzed
1193 	      || node == first_analyzed_var)
1194 	    break;
1195 	}
1196       symtab->process_new_functions ();
1197       first_analyzed_var = symtab->first_variable ();
1198       first_analyzed = symtab->first_function ();
1199 
1200       if (changed && symtab->dump_file)
1201 	fprintf (symtab->dump_file, "\n");
1202 
      /* Lower the representation, build callgraph edges and references for
	 all trivially needed symbols and all symbols referred to by them.  */
1205       while (queued_nodes != &symtab_terminator)
1206 	{
1207 	  changed = true;
1208 	  node = queued_nodes;
1209 	  queued_nodes = (symtab_node *)queued_nodes->aux;
1210 	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1211 	  if (cnode && cnode->definition)
1212 	    {
1213 	      cgraph_edge *edge;
1214 	      tree decl = cnode->decl;
1215 
	      /* ??? It is possible to create an extern inline function
		 and later use the weak alias attribute to kill its body.
		 See gcc.c-torture/compile/20011119-1.c.  */
1219 	      if (!DECL_STRUCT_FUNCTION (decl)
1220 		  && !cnode->alias
1221 		  && !cnode->thunk.thunk_p
1222 		  && !cnode->dispatcher_function)
1223 		{
1224 		  cnode->reset ();
1225 		  cnode->redefined_extern_inline = true;
1226 		  continue;
1227 		}
1228 
1229 	      if (!cnode->analyzed)
1230 		cnode->analyze ();
1231 
1232 	      for (edge = cnode->callees; edge; edge = edge->next_callee)
1233 		if (edge->callee->definition
1234 		    && (!DECL_EXTERNAL (edge->callee->decl)
1235 			/* When not optimizing, do not try to analyze extern
1236 			   inline functions.  Doing so is pointless.  */
1237 			|| opt_for_fn (edge->callee->decl, optimize)
			/* Weakrefs need to be preserved.  */
1239 			|| edge->callee->alias
1240 			/* always_inline functions are inlined even at -O0.  */
1241 		        || lookup_attribute
1242 				 ("always_inline",
1243 			          DECL_ATTRIBUTES (edge->callee->decl))
			/* Multiversioned functions need the dispatcher to
			   be produced locally even for extern functions.  */
1246 			|| edge->callee->function_version ()))
1247 		   enqueue_node (edge->callee);
1248 	      if (opt_for_fn (cnode->decl, optimize)
1249 		  && opt_for_fn (cnode->decl, flag_devirtualize))
1250 		{
1251 		  cgraph_edge *next;
1252 
1253 		  for (edge = cnode->indirect_calls; edge; edge = next)
1254 		    {
1255 		      next = edge->next_callee;
1256 		      if (edge->indirect_info->polymorphic)
1257 			walk_polymorphic_call_targets (&reachable_call_targets,
1258 						       edge);
1259 		    }
1260 		}
1261 
1262 	      /* If decl is a clone of an abstract function,
1263 		 mark that abstract function so that we don't release its body.
1264 		 The DECL_INITIAL() of that abstract function declaration
1265 		 will be later needed to output debug info.  */
1266 	      if (DECL_ABSTRACT_ORIGIN (decl))
1267 		{
1268 		  cgraph_node *origin_node
1269 		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1270 		  origin_node->used_as_abstract_origin = true;
1271 		}
	      /* Preserve a function's function context node.  It will
		 later be needed to output debug info.  */
1274 	      if (tree fn = decl_function_context (decl))
1275 		{
1276 		  cgraph_node *origin_node = cgraph_node::get_create (fn);
1277 		  enqueue_node (origin_node);
1278 		}
1279 	    }
1280 	  else
1281 	    {
1282 	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
1283 	      if (vnode && vnode->definition && !vnode->analyzed)
1284 		vnode->analyze ();
1285 	    }
1286 
1287 	  if (node->same_comdat_group)
1288 	    {
1289 	      symtab_node *next;
1290 	      for (next = node->same_comdat_group;
1291 		   next != node;
1292 		   next = next->same_comdat_group)
1293 		if (!next->comdat_local_p ())
1294 		  enqueue_node (next);
1295 	    }
1296 	  for (i = 0; node->iterate_reference (i, ref); i++)
1297 	    if (ref->referred->definition
1298 		&& (!DECL_EXTERNAL (ref->referred->decl)
1299 		    || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1300 			 && optimize)
1301 			|| (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1302 			    && opt_for_fn (ref->referred->decl, optimize))
1303 		    || node->alias
1304 		    || ref->referred->alias)))
1305 	      enqueue_node (ref->referred);
1306 	  symtab->process_new_functions ();
1307 	}
1308     }
1309   update_type_inheritance_graph ();
1310 
1311   /* Collect entry points to the unit.  */
1312   if (symtab->dump_file)
1313     {
1314       fprintf (symtab->dump_file, "\n\nInitial ");
1315       symtab->dump (symtab->dump_file);
1316     }
1317 
1318   if (first_time)
1319     {
1320       symtab_node *snode;
1321       FOR_EACH_SYMBOL (snode)
1322 	check_global_declaration (snode);
1323     }
1324 
1325   if (symtab->dump_file)
1326     fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1327 
1328   for (node = symtab->first_symbol ();
1329        node != first_handled
1330        && node != first_handled_var; node = next)
1331     {
1332       next = node->next;
1333       /* For symbols declared locally we clear TREE_READONLY when emitting
1334 	 the constructor (if one is needed).  For external declarations we can
1335 	 not safely assume that the type is readonly because we may be called
1336 	 during its construction.  */
1337       if (TREE_CODE (node->decl) == VAR_DECL
1338 	  && TYPE_P (TREE_TYPE (node->decl))
1339 	  && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (node->decl))
1340 	  && DECL_EXTERNAL (node->decl))
1341 	TREE_READONLY (node->decl) = 0;
1342       if (!node->aux && !node->referred_to_p ())
1343 	{
1344 	  if (symtab->dump_file)
1345 	    fprintf (symtab->dump_file, " %s", node->dump_name ());
1346 
1347 	  /* See if the debugger can use anything before the DECL
1348 	     passes away.  Perhaps it can notice a DECL that is now a
1349 	     constant and can tag the early DIE with an appropriate
1350 	     attribute.
1351 
1352 	     Otherwise, this is the last chance the debug_hooks have
1353 	     at looking at optimized away DECLs, since
1354 	     late_global_decl will subsequently be called from the
1355 	     contents of the now pruned symbol table.  */
1356 	  if (VAR_P (node->decl)
1357 	      && !decl_function_context (node->decl))
1358 	    {
1359 	      /* We are reclaiming totally unreachable code and variables
1360 	         so they effectively appear as readonly.  Show that to
1361 		 the debug machinery.  */
1362 	      TREE_READONLY (node->decl) = 1;
1363 	      node->definition = false;
1364 	      (*debug_hooks->late_global_decl) (node->decl);
1365 	    }
1366 
1367 	  node->remove ();
1368 	  continue;
1369 	}
1370       if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1371 	{
1372 	  tree decl = node->decl;
1373 
1374 	  if (cnode->definition && !gimple_has_body_p (decl)
1375 	      && !cnode->alias
1376 	      && !cnode->thunk.thunk_p)
1377 	    cnode->reset ();
1378 
1379 	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1380 		      || cnode->alias
1381 		      || gimple_has_body_p (decl)
1382 		      || cnode->native_rtl_p ());
1383 	  gcc_assert (cnode->analyzed == cnode->definition);
1384 	}
1385       node->aux = NULL;
1386     }
1387   for (;node; node = node->next)
1388     node->aux = NULL;
1389   first_analyzed = symtab->first_function ();
1390   first_analyzed_var = symtab->first_variable ();
1391   if (symtab->dump_file)
1392     {
1393       fprintf (symtab->dump_file, "\n\nReclaimed ");
1394       symtab->dump (symtab->dump_file);
1395     }
1396   bitmap_obstack_release (NULL);
1397   ggc_collect ();
1398   /* Initialize assembler name hash, in particular we want to trigger C++
1399      mangling and same body alias creation before we free DECL_ARGUMENTS
1400      used by it.  */
1401   if (!seen_error ())
1402     symtab->symtab_initialize_asm_name_hash ();
1403 
1404   input_location = saved_loc;
1405 }
1406 
1407 /* Check declaration of the type of ALIAS for compatibility with its TARGET
1408    (which may be an ifunc resolver) and issue a diagnostic when they are
1409    not compatible according to language rules (plus a C++ extension for
1410    non-static member functions).  */
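
/* For reference, an illustrative ifunc alias and resolver:

     static int foo_impl (void) { return 0; }
     static int (*resolve_foo (void)) (void) { return foo_impl; }
     int foo (void) __attribute__ ((ifunc ("resolve_foo")));

   Here the resolver returns a pointer to a function of foo's type, which is
   what the checks below expect.  */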
1411 
1412 static void
maybe_diag_incompatible_alias (tree alias, tree target)
1414 {
1415   tree altype = TREE_TYPE (alias);
1416   tree targtype = TREE_TYPE (target);
1417 
1418   bool ifunc = cgraph_node::get (alias)->ifunc_resolver;
1419   tree funcptr = altype;
1420 
1421   if (ifunc)
1422     {
1423       /* Handle attribute ifunc first.  */
1424       if (TREE_CODE (altype) == METHOD_TYPE)
1425 	{
1426 	  /* Set FUNCPTR to the type of the alias target.  If the type
1427 	     is a non-static member function of class C, construct a type
1428 	     of an ordinary function taking C* as the first argument,
1429 	     followed by the member function argument list, and use it
1430 	     instead to check for incompatibility.  This conversion is
1431 	     not defined by the language but an extension provided by
1432 	     G++.  */
1433 
1434 	  tree rettype = TREE_TYPE (altype);
1435 	  tree args = TYPE_ARG_TYPES (altype);
1436 	  altype = build_function_type (rettype, args);
1437 	  funcptr = altype;
1438 	}
1439 
1440       targtype = TREE_TYPE (targtype);
1441 
1442       if (POINTER_TYPE_P (targtype))
1443 	{
1444 	  targtype = TREE_TYPE (targtype);
1445 
1446 	  /* Only issue Wattribute-alias for conversions to void* with
1447 	     -Wextra.  */
1448 	  if (VOID_TYPE_P (targtype) && !extra_warnings)
1449 	    return;
1450 
1451 	  /* Proceed to handle incompatible ifunc resolvers below.  */
1452 	}
1453       else
1454 	{
1455 	  funcptr = build_pointer_type (funcptr);
1456 
1457 	  error_at (DECL_SOURCE_LOCATION (target),
1458 		    "%<ifunc%> resolver for %qD must return %qT",
1459 		 alias, funcptr);
1460 	  inform (DECL_SOURCE_LOCATION (alias),
1461 		  "resolver indirect function declared here");
1462 	  return;
1463 	}
1464     }
1465 
1466   if ((!FUNC_OR_METHOD_TYPE_P (targtype)
1467        || (prototype_p (altype)
1468 	   && prototype_p (targtype)
1469 	   && !types_compatible_p (altype, targtype))))
1470     {
1471       /* Warn for incompatibilities.  Avoid warning for functions
1472 	 without a prototype to make it possible to declare aliases
1473 	 without knowing the exact type, as libstdc++ does.  */
1474       if (ifunc)
1475 	{
1476 	  funcptr = build_pointer_type (funcptr);
1477 
1478 	  auto_diagnostic_group d;
1479 	  if (warning_at (DECL_SOURCE_LOCATION (target),
1480 			  OPT_Wattribute_alias_,
1481 			  "%<ifunc%> resolver for %qD should return %qT",
1482 			  alias, funcptr))
1483 	    inform (DECL_SOURCE_LOCATION (alias),
1484 		    "resolver indirect function declared here");
1485 	}
1486       else
1487 	{
1488 	  auto_diagnostic_group d;
1489 	  if (warning_at (DECL_SOURCE_LOCATION (alias),
1490 			    OPT_Wattribute_alias_,
1491 			    "%qD alias between functions of incompatible "
1492 			    "types %qT and %qT", alias, altype, targtype))
1493 	    inform (DECL_SOURCE_LOCATION (target),
1494 		    "aliased declaration here");
1495 	}
1496     }
1497 }
1498 
1499 /* Translate the ugly representation of aliases as alias pairs into nice
1500    representation in callgraph.  We don't handle all cases yet,
1501    unfortunately.  */
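
/* For example (illustrative), the source-level construct

     int target_fn (void) { return 0; }
     int alias_fn (void) __attribute__ ((alias ("target_fn")));

   reaches this function as the alias pair (alias_fn, "target_fn").  */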
1502 
1503 static void
handle_alias_pairs (void)
1505 {
1506   alias_pair *p;
1507   unsigned i;
1508 
1509   for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1510     {
1511       symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1512 
      /* Weakrefs with a target not defined in the current unit are easy to
	 handle: they behave just like external variables except that we need
	 to note the alias flag to later output the weakref pseudo-op into
	 the asm file.  */
1516       if (!target_node
1517 	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1518 	{
1519 	  symtab_node *node = symtab_node::get (p->decl);
1520 	  if (node)
1521 	    {
1522 	      node->alias_target = p->target;
1523 	      node->weakref = true;
1524 	      node->alias = true;
1525 	      node->transparent_alias = true;
1526 	    }
1527 	  alias_pairs->unordered_remove (i);
1528 	  continue;
1529 	}
1530       else if (!target_node)
1531 	{
1532 	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1533 	  symtab_node *node = symtab_node::get (p->decl);
1534 	  if (node)
1535 	    node->alias = false;
1536 	  alias_pairs->unordered_remove (i);
1537 	  continue;
1538 	}
1539 
1540       if (DECL_EXTERNAL (target_node->decl)
1541 	  /* We use local aliases for C++ thunks to force the tailcall
1542 	     to bind locally.  This is a hack - to keep it working do
1543 	     the following (which is not strictly correct).  */
1544 	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1545 	      || ! DECL_VIRTUAL_P (target_node->decl))
1546 	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1547 	{
1548 	  error ("%q+D aliased to external symbol %qE",
1549 		 p->decl, p->target);
1550 	}
1551 
1552       if (TREE_CODE (p->decl) == FUNCTION_DECL
1553           && target_node && is_a <cgraph_node *> (target_node))
1554 	{
1555 	  maybe_diag_incompatible_alias (p->decl, target_node->decl);
1556 
1557 	  maybe_diag_alias_attributes (p->decl, target_node->decl);
1558 
1559 	  cgraph_node *src_node = cgraph_node::get (p->decl);
1560 	  if (src_node && src_node->definition)
1561 	    src_node->reset ();
1562 	  cgraph_node::create_alias (p->decl, target_node->decl);
1563 	  alias_pairs->unordered_remove (i);
1564 	}
1565       else if (VAR_P (p->decl)
1566 	       && target_node && is_a <varpool_node *> (target_node))
1567 	{
1568 	  varpool_node::create_alias (p->decl, target_node->decl);
1569 	  alias_pairs->unordered_remove (i);
1570 	}
1571       else
1572 	{
1573 	  error ("%q+D alias between function and variable is not supported",
1574 		 p->decl);
1575 	  inform (DECL_SOURCE_LOCATION (target_node->decl),
1576 		  "aliased declaration here");
1577 
1578 	  alias_pairs->unordered_remove (i);
1579 	}
1580     }
1581   vec_free (alias_pairs);
1582 }
1583 
1584 
1585 /* Figure out what functions we want to assemble.  */
1586 
1587 static void
1588 mark_functions_to_output (void)
1589 {
1590   bool check_same_comdat_groups = false;
1591   cgraph_node *node;
1592 
1593   if (flag_checking)
1594     FOR_EACH_FUNCTION (node)
1595       gcc_assert (!node->process);
1596 
1597   FOR_EACH_FUNCTION (node)
1598     {
1599       tree decl = node->decl;
1600 
1601       gcc_assert (!node->process || node->same_comdat_group);
1602       if (node->process)
1603 	continue;
1604 
1605       /* We need to output all local functions that are used and not
1606 	 always inlined, as well as those that are reachable from
1607 	 outside the current compilation unit.  */
1608       if (node->analyzed
1609 	  && !node->thunk.thunk_p
1610 	  && !node->alias
1611 	  && !node->inlined_to
1612 	  && !TREE_ASM_WRITTEN (decl)
1613 	  && !DECL_EXTERNAL (decl))
1614 	{
1615 	  node->process = 1;
1616 	  if (node->same_comdat_group)
1617 	    {
1618 	      cgraph_node *next;
1619 	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1620 		   next != node;
1621 		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1622 		if (!next->thunk.thunk_p && !next->alias
1623 		    && !next->comdat_local_p ())
1624 		  next->process = 1;
1625 	    }
1626 	}
1627       else if (node->same_comdat_group)
1628 	{
1629 	  if (flag_checking)
1630 	    check_same_comdat_groups = true;
1631 	}
1632       else
1633 	{
1634 	  /* We should've reclaimed all functions that are not needed.  */
1635 	  if (flag_checking
1636 	      && !node->inlined_to
1637 	      && gimple_has_body_p (decl)
1638 	      /* FIXME: in an ltrans unit when the offline copy is outside a partition
1639 		 but inline copies are inside a partition, we can end up not removing
1640 		 the body since we no longer have an analyzed node pointing to it.  */
1641 	      && !node->in_other_partition
1642 	      && !node->alias
1643 	      && !node->clones
1644 	      && !DECL_EXTERNAL (decl))
1645 	    {
1646 	      node->debug ();
1647 	      internal_error ("failed to reclaim unneeded function");
1648 	    }
1649 	  gcc_assert (node->inlined_to
1650 		      || !gimple_has_body_p (decl)
1651 		      || node->in_other_partition
1652 		      || node->clones
1653 		      || DECL_ARTIFICIAL (decl)
1654 		      || DECL_EXTERNAL (decl));
1655 
1656 	}
1657 
1658     }
1659   if (flag_checking && check_same_comdat_groups)
1660     FOR_EACH_FUNCTION (node)
1661       if (node->same_comdat_group && !node->process)
1662 	{
1663 	  tree decl = node->decl;
1664 	  if (!node->inlined_to
1665 	      && gimple_has_body_p (decl)
1666 	      /* FIXME: in an ltrans unit when the offline copy is outside a
1667 		 partition but inline copies are inside a partition, we can
1668 		 end up not removing the body since we no longer have an
1669 		 analyzed node pointing to it.  */
1670 	      && !node->in_other_partition
1671 	      && !node->clones
1672 	      && !DECL_EXTERNAL (decl))
1673 	    {
1674 	      node->debug ();
1675 	      internal_error ("failed to reclaim unneeded function in same "
1676 			      "comdat group");
1677 	    }
1678 	}
1679 }
1680 
1681 /* DECL is a FUNCTION_DECL.  Initialize data structures so DECL is a function
1682    in lowered GIMPLE form.  IN_SSA is true if the GIMPLE is in SSA form.
1683 
1684    Set current_function_decl and cfun to the newly constructed empty function
1685    body and return the basic block in the function body.  */
1686 
1687 basic_block
1688 init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
1689 {
1690   basic_block bb;
1691   edge e;
1692 
1693   current_function_decl = decl;
1694   allocate_struct_function (decl, false);
1695   gimple_register_cfg_hooks ();
1696   init_empty_tree_cfg ();
1697   init_tree_ssa (cfun);
1698 
1699   if (in_ssa)
1700     {
1701       init_ssa_operands (cfun);
1702       cfun->gimple_df->in_ssa_p = true;
1703       cfun->curr_properties |= PROP_ssa;
1704     }
1705 
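  /* Give the new function an outermost lexical BLOCK so later passes and
     debug output have a scope to attach declarations to.  */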
1706   DECL_INITIAL (decl) = make_node (BLOCK);
1707   BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1708 
1709   DECL_SAVED_TREE (decl) = error_mark_node;
1710   cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1711 			    | PROP_cfg | PROP_loops);
1712 
1713   set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1714   init_loops_structure (cfun, loops_for_fn (cfun), 1);
1715   loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1716 
1717   /* Create BB for body of the function and connect it properly.  */
1718   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1719   EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1720   bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1721   bb->count = count;
1722   e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1723   e->probability = profile_probability::always ();
1724   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1725   e->probability = profile_probability::always ();
1726   add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1727 
1728   return bb;
1729 }
1730 
1731 /* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
1732    VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
1733    it is non-null.  THIS_ADJUSTING is nonzero for a this-adjusting thunk and
1734    zero for a result-adjusting thunk.  */
1735 
1736 tree
1737 thunk_adjust (gimple_stmt_iterator * bsi,
1738 	      tree ptr, bool this_adjusting,
1739 	      HOST_WIDE_INT fixed_offset, tree virtual_offset,
1740 	      HOST_WIDE_INT indirect_offset)
1741 {
1742   gassign *stmt;
1743   tree ret;
1744 
1745   if (this_adjusting
1746       && fixed_offset != 0)
1747     {
1748       stmt = gimple_build_assign
1749 		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
1750 						       ptr,
1751 						       fixed_offset));
1752       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1753     }
1754 
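  /* Lazily build the type of a vtable entry; both the virtual and the
     indirect offset adjustments below dereference through it.  */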
1755   if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
1756     {
1757       tree vfunc_type = make_node (FUNCTION_TYPE);
1758       TREE_TYPE (vfunc_type) = integer_type_node;
1759       TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1760       layout_type (vfunc_type);
1761 
1762       vtable_entry_type = build_pointer_type (vfunc_type);
1763     }
1764 
1765   /* If there's a virtual offset, look up that value in the vtable and
1766      adjust the pointer again.  */
1767   if (virtual_offset)
1768     {
1769       tree vtabletmp;
1770       tree vtabletmp2;
1771       tree vtabletmp3;
1772 
1773       vtabletmp =
1774 	create_tmp_reg (build_pointer_type
1775 			  (build_pointer_type (vtable_entry_type)), "vptr");
1776 
1777       /* The vptr is always at offset zero in the object.  */
1778       stmt = gimple_build_assign (vtabletmp,
1779 				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1780 					  ptr));
1781       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1782 
1783       /* Form the vtable address.  */
1784       vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1785 				     "vtableaddr");
1786       stmt = gimple_build_assign (vtabletmp2,
1787 				  build_simple_mem_ref (vtabletmp));
1788       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1789 
1790       /* Find the entry with the vcall offset.  */
1791       stmt = gimple_build_assign (vtabletmp2,
1792 				  fold_build_pointer_plus_loc (input_location,
1793 							       vtabletmp2,
1794 							       virtual_offset));
1795       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1796 
1797       /* Get the offset itself.  */
1798       vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1799 				     "vcalloffset");
1800       stmt = gimple_build_assign (vtabletmp3,
1801 				  build_simple_mem_ref (vtabletmp2));
1802       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1803 
1804       /* Adjust the `this' pointer.  */
1805       ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1806       ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1807 				      GSI_CONTINUE_LINKING);
1808     }
1809 
1810   /* Likewise for an offset that is stored in the object that contains the
1811      vtable.  */
1812   if (indirect_offset != 0)
1813     {
1814       tree offset_ptr, offset_tree;
1815 
1816       /* Get the address of the offset.  */
1817       offset_ptr
1818         = create_tmp_reg (build_pointer_type
1819 			  (build_pointer_type (vtable_entry_type)),
1820 			  "offset_ptr");
1821       stmt = gimple_build_assign (offset_ptr,
1822 				  build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
1823 					  ptr));
1824       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1825 
1826       stmt = gimple_build_assign
1827 	     (offset_ptr,
1828 	      fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
1829 					       indirect_offset));
1830       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1831 
1832       /* Get the offset itself.  */
1833       offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
1834 				    "offset");
1835       stmt = gimple_build_assign (offset_tree,
1836 				  build_simple_mem_ref (offset_ptr));
1837       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1838 
1839       /* Adjust the `this' pointer.  */
1840       ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
1841       ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1842 				      GSI_CONTINUE_LINKING);
1843     }
1844 
1845   if (!this_adjusting
1846       && fixed_offset != 0)
1847     /* Adjust the pointer by the constant.  */
1848     {
1849       tree ptrtmp;
1850 
1851       if (VAR_P (ptr))
1852         ptrtmp = ptr;
1853       else
1854         {
1855           ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1856           stmt = gimple_build_assign (ptrtmp, ptr);
1857 	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1858 	}
1859       ptr = fold_build_pointer_plus_hwi_loc (input_location,
1860 					     ptrtmp, fixed_offset);
1861     }
1862 
1863   /* Emit the statement and gimplify the adjustment expression.  */
1864   ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1865   stmt = gimple_build_assign (ret, ptr);
1866   gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1867 
1868   return ret;
1869 }
1870 
1871 /* Expand the thunk into GIMPLE if possible.
1872    When FORCE_GIMPLE_THUNK is true, a GIMPLE thunk is created and
1873    no assembler is produced.
1874    When OUTPUT_ASM_THUNKS is true, also produce assembler for
1875    thunks that are not lowered.  */
1876 
1877 bool
1878 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1879 {
1880   bool this_adjusting = thunk.this_adjusting;
1881   HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1882   HOST_WIDE_INT virtual_value = thunk.virtual_value;
1883   HOST_WIDE_INT indirect_offset = thunk.indirect_offset;
1884   tree virtual_offset = NULL;
1885   tree alias = callees->callee->decl;
1886   tree thunk_fndecl = decl;
1887   tree a;
1888 
1889   if (!force_gimple_thunk
1890       && this_adjusting
1891       && indirect_offset == 0
1892       && !DECL_EXTERNAL (alias)
1893       && !DECL_STATIC_CHAIN (alias)
1894       && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1895 					      virtual_value, alias))
1896     {
1897       tree fn_block;
1898       tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1899 
1900       if (!output_asm_thunks)
1901 	{
1902 	  analyzed = true;
1903 	  return false;
1904 	}
1905 
1906       if (in_lto_p)
1907 	get_untransformed_body ();
1908       a = DECL_ARGUMENTS (thunk_fndecl);
1909 
1910       current_function_decl = thunk_fndecl;
1911 
1912       /* Ensure thunks are emitted in their correct sections.  */
1913       resolve_unique_section (thunk_fndecl, 0,
1914 			      flag_function_sections);
1915 
1916       DECL_RESULT (thunk_fndecl)
1917 	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1918 		      RESULT_DECL, 0, restype);
1919       DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1920 
1921       /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1922 	 create one.  */
1923       fn_block = make_node (BLOCK);
1924       BLOCK_VARS (fn_block) = a;
1925       DECL_INITIAL (thunk_fndecl) = fn_block;
1926       BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1927       allocate_struct_function (thunk_fndecl, false);
1928       init_function_start (thunk_fndecl);
1929       cfun->is_thunk = 1;
1930       insn_locations_init ();
1931       set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1932       prologue_location = curr_insn_location ();
1933 
1934       targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1935 				       fixed_offset, virtual_value, alias);
1936 
1937       insn_locations_finalize ();
1938       init_insn_lengths ();
1939       free_after_compilation (cfun);
1940       TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1941       thunk.thunk_p = false;
1942       analyzed = false;
1943     }
1944   else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1945     {
1946       error ("generic thunk code fails for method %qD which uses %<...%>",
1947 	     thunk_fndecl);
1948       TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1949       analyzed = true;
1950       return false;
1951     }
1952   else
1953     {
1954       tree restype;
1955       basic_block bb, then_bb, else_bb, return_bb;
1956       gimple_stmt_iterator bsi;
1957       int nargs = 0;
1958       tree arg;
1959       int i;
1960       tree resdecl;
1961       tree restmp = NULL;
1962 
1963       gcall *call;
1964       greturn *ret;
1965       bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1966 
1967       /* We may be called from create_wrapper, which releases the body except
1968 	 for DECL_ARGUMENTS.  In that case force_gimple_thunk is true.  */
1969       if (in_lto_p && !force_gimple_thunk)
1970 	get_untransformed_body ();
1971 
1972       /* We need to force DECL_IGNORED_P when the thunk is created
1973 	 after early debug was run.  */
1974       if (force_gimple_thunk)
1975 	DECL_IGNORED_P (thunk_fndecl) = 1;
1976 
1977       a = DECL_ARGUMENTS (thunk_fndecl);
1978 
1979       current_function_decl = thunk_fndecl;
1980 
1981       /* Ensure thunks are emitted in their correct sections.  */
1982       resolve_unique_section (thunk_fndecl, 0,
1983 			      flag_function_sections);
1984 
1985       bitmap_obstack_initialize (NULL);
1986 
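      /* For a virtual thunk the adjustment is looked up in the vtable at run
	 time; represent the vcall offset as a tree for thunk_adjust below.  */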
1987       if (thunk.virtual_offset_p)
1988         virtual_offset = size_int (virtual_value);
1989 
1990       /* Build the return declaration for the function.  */
1991       restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1992       if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1993 	{
1994 	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1995 	  DECL_ARTIFICIAL (resdecl) = 1;
1996 	  DECL_IGNORED_P (resdecl) = 1;
1997 	  DECL_CONTEXT (resdecl) = thunk_fndecl;
1998 	  DECL_RESULT (thunk_fndecl) = resdecl;
1999 	}
2000       else
2001 	resdecl = DECL_RESULT (thunk_fndecl);
2002 
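      /* Base the thunk's CFG counts on the node's profile count, falling back
	 to a guessed local count when no profile information is available.  */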
2003       profile_count cfg_count = count;
2004       if (!cfg_count.initialized_p ())
2005 	cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
2006 
2007       bb = then_bb = else_bb = return_bb
2008 	= init_lowered_empty_function (thunk_fndecl, true, cfg_count);
2009 
2010       bsi = gsi_start_bb (bb);
2011 
2012       /* Build call to the function being thunked.  */
2013       if (!VOID_TYPE_P (restype)
2014 	  && (!alias_is_noreturn
2015 	      || TREE_ADDRESSABLE (restype)
2016 	      || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
2017 	{
2018 	  if (DECL_BY_REFERENCE (resdecl))
2019 	    {
2020 	      restmp = gimple_fold_indirect_ref (resdecl);
2021 	      if (!restmp)
2022 		restmp = build2 (MEM_REF,
2023 				 TREE_TYPE (TREE_TYPE (resdecl)),
2024 				 resdecl,
2025 				 build_int_cst (TREE_TYPE (resdecl), 0));
2026 	    }
2027 	  else if (!is_gimple_reg_type (restype))
2028 	    {
2029 	      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
2030 		{
2031 		  restmp = resdecl;
2032 
2033 		  if (VAR_P (restmp))
2034 		    {
2035 		      add_local_decl (cfun, restmp);
2036 		      BLOCK_VARS (DECL_INITIAL (current_function_decl))
2037 			= restmp;
2038 		    }
2039 		}
2040 	      else
2041 		restmp = create_tmp_var (restype, "retval");
2042 	    }
2043 	  else
2044 	    restmp = create_tmp_reg (restype, "retval");
2045 	}
2046 
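      /* Build the argument list for the call.  When this-adjusting, the first
	 argument is the adjusted this pointer computed by thunk_adjust.  */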
2047       for (arg = a; arg; arg = DECL_CHAIN (arg))
2048         nargs++;
2049       auto_vec<tree> vargs (nargs);
2050       i = 0;
2051       arg = a;
2052       if (this_adjusting)
2053 	{
2054 	  vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
2055 					  virtual_offset, indirect_offset));
2056 	  arg = DECL_CHAIN (a);
2057 	  i = 1;
2058 	}
2059 
2060       if (nargs)
2061 	for (; i < nargs; i++, arg = DECL_CHAIN (arg))
2062 	  {
2063 	    tree tmp = arg;
2064 	    if (VECTOR_TYPE_P (TREE_TYPE (arg))
2065 		|| TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
2066 	      DECL_GIMPLE_REG_P (arg) = 1;
2067 
2068 	    if (!is_gimple_val (arg))
2069 	      {
2070 		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
2071 				      (TREE_TYPE (arg)), "arg");
2072 		gimple *stmt = gimple_build_assign (tmp, arg);
2073 		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2074 	      }
2075 	    vargs.quick_push (tmp);
2076 	  }
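      /* Emit the call to the target and remember it as the call statement of
	 the thunk's only call edge.  */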
2077       call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
2078       callees->call_stmt = call;
2079       gimple_call_set_from_thunk (call, true);
2080       if (DECL_STATIC_CHAIN (alias))
2081 	{
2082 	  tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
2083 	  tree type = TREE_TYPE (p);
2084 	  tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
2085 				  PARM_DECL, create_tmp_var_name ("CHAIN"),
2086 				  type);
2087 	  DECL_ARTIFICIAL (decl) = 1;
2088 	  DECL_IGNORED_P (decl) = 1;
2089 	  TREE_USED (decl) = 1;
2090 	  DECL_CONTEXT (decl) = thunk_fndecl;
2091 	  DECL_ARG_TYPE (decl) = type;
2092 	  TREE_READONLY (decl) = 1;
2093 
2094 	  struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
2095 	  sf->static_chain_decl = decl;
2096 
2097 	  gimple_call_set_chain (call, decl);
2098 	}
2099 
2100       /* Return slot optimization is always possible and in fact required to
2101          return values with DECL_BY_REFERENCE.  */
2102       if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
2103 	  && (!is_gimple_reg_type (TREE_TYPE (resdecl))
2104 	      || DECL_BY_REFERENCE (resdecl)))
2105         gimple_call_set_return_slot_opt (call, true);
2106 
2107       if (restmp)
2108 	{
2109           gimple_call_set_lhs (call, restmp);
2110 	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
2111 						 TREE_TYPE (TREE_TYPE (alias))));
2112 	}
2113       gsi_insert_after (&bsi, call, GSI_NEW_STMT);
2114       if (!alias_is_noreturn)
2115 	{
2116 	  if (restmp && !this_adjusting
2117 	      && (fixed_offset || virtual_offset))
2118 	    {
2119 	      tree true_label = NULL_TREE;
2120 
2121 	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
2122 		{
2123 		  gimple *stmt;
2124 		  edge e;
2125 		  /* If the return type is a pointer, we need to
2126 		     protect against NULL.  We know there will be an
2127 		     adjustment, because that's why we're emitting a
2128 		     thunk.  */
2129 		  then_bb = create_basic_block (NULL, bb);
2130 		  then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
2131 		  return_bb = create_basic_block (NULL, then_bb);
2132 		  return_bb->count = cfg_count;
2133 		  else_bb = create_basic_block (NULL, else_bb);
2134 		  else_bb->count = cfg_count.apply_scale (1, 16);
2135 		  add_bb_to_loop (then_bb, bb->loop_father);
2136 		  add_bb_to_loop (return_bb, bb->loop_father);
2137 		  add_bb_to_loop (else_bb, bb->loop_father);
2138 		  remove_edge (single_succ_edge (bb));
2139 		  true_label = gimple_block_label (then_bb);
2140 		  stmt = gimple_build_cond (NE_EXPR, restmp,
2141 					    build_zero_cst (TREE_TYPE (restmp)),
2142 					    NULL_TREE, NULL_TREE);
2143 		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2144 		  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
2145 		  e->probability = profile_probability::guessed_always ()
2146 					.apply_scale (1, 16);
2147 		  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
2148 		  e->probability = profile_probability::guessed_always ()
2149 					.apply_scale (1, 16);
2150 		  make_single_succ_edge (return_bb,
2151 					 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
2152 		  make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
2153 		  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
2154 		  e->probability = profile_probability::always ();
2155 		  bsi = gsi_last_bb (then_bb);
2156 		}
2157 
2158 	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
2159 				     fixed_offset, virtual_offset,
2160 				     indirect_offset);
2161 	      if (true_label)
2162 		{
2163 		  gimple *stmt;
2164 		  bsi = gsi_last_bb (else_bb);
2165 		  stmt = gimple_build_assign (restmp,
2166 					      build_zero_cst (TREE_TYPE (restmp)));
2167 		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2168 		  bsi = gsi_last_bb (return_bb);
2169 		}
2170 	    }
2171 	  else
2172 	    gimple_call_set_tail (call, true);
2173 
2174 	  /* Build return value.  */
2175 	  if (!DECL_BY_REFERENCE (resdecl))
2176 	    ret = gimple_build_return (restmp);
2177 	  else
2178 	    ret = gimple_build_return (resdecl);
2179 
2180 	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2181 	}
2182       else
2183 	{
2184 	  gimple_call_set_tail (call, true);
2185 	  remove_edge (single_succ_edge (bb));
2186 	}
2187 
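      /* The body was built directly in SSA form; update the profile status
	 and clean up the CFG before handing the thunk to the pass manager.  */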
2188       cfun->gimple_df->in_ssa_p = true;
2189       update_max_bb_count ();
2190       profile_status_for_fn (cfun)
2191         = cfg_count.initialized_p () && cfg_count.ipa_p ()
2192 	  ? PROFILE_READ : PROFILE_GUESSED;
2193       /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
2194       TREE_ASM_WRITTEN (thunk_fndecl) = false;
2195       delete_unreachable_blocks ();
2196       update_ssa (TODO_update_ssa);
2197       checking_verify_flow_info ();
2198       free_dominance_info (CDI_DOMINATORS);
2199 
2200       /* Since we want to emit the thunk, we explicitly mark its name as
2201 	 referenced.  */
2202       thunk.thunk_p = false;
2203       lowered = true;
2204       bitmap_obstack_release (NULL);
2205     }
2206   current_function_decl = NULL;
2207   set_cfun (NULL);
2208   return true;
2209 }
2210 
2211 /* Assemble thunks and aliases associated with the node.  */
2212 
2213 void
2214 cgraph_node::assemble_thunks_and_aliases (void)
2215 {
2216   cgraph_edge *e;
2217   ipa_ref *ref;
2218 
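  /* Thunks are represented as callers of their target node; expand and
     assemble each one, recursing to pick up thunks of thunks.  */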
2219   for (e = callers; e;)
2220     if (e->caller->thunk.thunk_p
2221 	&& !e->caller->inlined_to)
2222       {
2223 	cgraph_node *thunk = e->caller;
2224 
2225 	e = e->next_caller;
2226 	thunk->expand_thunk (true, false);
2227 	thunk->assemble_thunks_and_aliases ();
2228       }
2229     else
2230       e = e->next_caller;
2231 
2232   FOR_EACH_ALIAS (this, ref)
2233     {
2234       cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2235       if (!alias->transparent_alias)
2236 	{
2237 	  bool saved_written = TREE_ASM_WRITTEN (decl);
2238 
2239 	  /* Force assemble_alias to really output the alias this time instead
2240 	     of buffering it in the alias pairs.  */
2241 	  TREE_ASM_WRITTEN (decl) = 1;
2242 	  if (alias->symver)
2243 	    do_assemble_symver (alias->decl,
2244 				DECL_ASSEMBLER_NAME (decl));
2245 	  else
2246 	    do_assemble_alias (alias->decl,
2247 			       DECL_ASSEMBLER_NAME (decl));
2248 	  alias->assemble_thunks_and_aliases ();
2249 	  TREE_ASM_WRITTEN (decl) = saved_written;
2250 	}
2251     }
2252 }
2253 
2254 /* Expand function specified by node.  */
2255 
2256 void
2257 cgraph_node::expand (void)
2258 {
2259   location_t saved_loc;
2260 
2261   /* We ought to not compile any inline clones.  */
2262   gcc_assert (!inlined_to);
2263 
2264   /* __RTL functions are compiled as soon as they are parsed, so don't
2265      do it again.  */
2266   if (native_rtl_p ())
2267     return;
2268 
2269   announce_function (decl);
2270   process = 0;
2271   gcc_assert (lowered);
2272   get_untransformed_body ();
2273 
2274   /* Generate RTL for the body of DECL.  */
2275 
2276   timevar_push (TV_REST_OF_COMPILATION);
2277 
2278   gcc_assert (symtab->global_info_ready);
2279 
2280   /* Initialize the default bitmap obstack.  */
2281   bitmap_obstack_initialize (NULL);
2282 
2283   /* Initialize the RTL code for the function.  */
2284   saved_loc = input_location;
2285   input_location = DECL_SOURCE_LOCATION (decl);
2286 
2287   gcc_assert (DECL_STRUCT_FUNCTION (decl));
2288   push_cfun (DECL_STRUCT_FUNCTION (decl));
2289   init_function_start (decl);
2290 
2291   gimple_register_cfg_hooks ();
2292 
2293   bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
2294 
2295   update_ssa (TODO_update_ssa_only_virtuals);
2296   execute_all_ipa_transforms (false);
2297 
2298   /* Perform all tree transforms and optimizations.  */
2299 
2300   /* Signal the start of passes.  */
2301   invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2302 
2303   execute_pass_list (cfun, g->get_passes ()->all_passes);
2304 
2305   /* Signal the end of passes.  */
2306   invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2307 
2308   bitmap_obstack_release (&reg_obstack);
2309 
2310   /* Release the default bitmap obstack.  */
2311   bitmap_obstack_release (NULL);
2312 
2313   /* If requested, warn about function definitions where the function will
2314      return a value (usually of some struct or union type) which itself will
2315      take up a lot of stack space.  */
2316   if (!DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2317     {
2318       tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2319 
2320       if (ret_type && TYPE_SIZE_UNIT (ret_type)
2321 	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2322 	  && compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2323 			       warn_larger_than_size) > 0)
2324 	{
2325 	  unsigned int size_as_int
2326 	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2327 
2328 	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2329 	    warning (OPT_Wlarger_than_,
2330 		     "size of return value of %q+D is %u bytes",
2331                      decl, size_as_int);
2332 	  else
2333 	    warning (OPT_Wlarger_than_,
2334 		     "size of return value of %q+D is larger than %wu bytes",
2335 	             decl, warn_larger_than_size);
2336 	}
2337     }
2338 
2339   gimple_set_body (decl, NULL);
2340   if (DECL_STRUCT_FUNCTION (decl) == 0
2341       && !cgraph_node::get (decl)->origin)
2342     {
2343       /* Stop pointing to the local nodes about to be freed.
2344 	 But DECL_INITIAL must remain nonzero so we know this
2345 	 was an actual function definition.
2346 	 For a nested function, this is done in c_pop_function_context.
2347 	 If rest_of_compilation set this to 0, leave it 0.  */
2348       if (DECL_INITIAL (decl) != 0)
2349 	DECL_INITIAL (decl) = error_mark_node;
2350     }
2351 
2352   input_location = saved_loc;
2353 
2354   ggc_collect ();
2355   timevar_pop (TV_REST_OF_COMPILATION);
2356 
2357   /* Make sure that the back end didn't give up on compiling.  */
2358   gcc_assert (TREE_ASM_WRITTEN (decl));
2359   if (cfun)
2360     pop_cfun ();
2361 
2362   /* It would make a lot more sense to output thunks before the function body
2363      to get more forward and fewer backward jumps.  This however would require
2364      solving a problem with comdats.  See PR48668.  Also aliases must come
2365      after the function itself to make one-pass assemblers, like the one on
2366      AIX, happy.  See PR 50689.
2367      FIXME: Perhaps thunks should be moved before the function IFF they are
2368      not in comdat groups.  */
2369   assemble_thunks_and_aliases ();
2370   release_body ();
2371   /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
2372      points to the dead function body.  */
2373   remove_callees ();
2374   remove_all_references ();
2375 }
2376 
2377 /* Node comparator responsible for ordering functions by the time at which
2378    they were first executed (their time profile).  */
2379 
2380 int
2381 tp_first_run_node_cmp (const void *pa, const void *pb)
2382 {
2383   const cgraph_node *a = *(const cgraph_node * const *) pa;
2384   const cgraph_node *b = *(const cgraph_node * const *) pb;
2385   unsigned int tp_first_run_a = a->tp_first_run;
2386   unsigned int tp_first_run_b = b->tp_first_run;
2387 
2388   if (!opt_for_fn (a->decl, flag_profile_reorder_functions)
2389       || a->no_reorder)
2390     tp_first_run_a = 0;
2391   if (!opt_for_fn (b->decl, flag_profile_reorder_functions)
2392       || b->no_reorder)
2393     tp_first_run_b = 0;
2394 
2395   if (tp_first_run_a == tp_first_run_b)
2396     return a->order - b->order;
2397 
2398   /* Functions with a time profile must come before those without one (zero wraps to INT_MAX below so it sorts last).  */
2399   tp_first_run_a = (tp_first_run_a - 1) & INT_MAX;
2400   tp_first_run_b = (tp_first_run_b - 1) & INT_MAX;
2401 
2402   return tp_first_run_a - tp_first_run_b;
2403 }
2404 
2405 /* Expand all functions that must be output.
2406 
2407    Attempt to topologically sort the nodes so that a function is output
2408    after all of its callees have been assembled, allowing data to be
2409    propagated across the callgraph.  Use a stack to get smaller distance
2410    between a function and its callees (later we may choose to use a more
2411    sophisticated algorithm for function reordering; we will likely want
2412    to use subsections to make the output functions appear in top-down
2413    order).  */
2414 
2415 static void
2416 expand_all_functions (void)
2417 {
2418   cgraph_node *node;
2419   cgraph_node **order = XCNEWVEC (cgraph_node *,
2420 					 symtab->cgraph_count);
2421   cgraph_node **tp_first_run_order = XCNEWVEC (cgraph_node *,
2422 					 symtab->cgraph_count);
2423   unsigned int expanded_func_count = 0, profiled_func_count = 0;
2424   int order_pos, tp_first_run_order_pos = 0, new_order_pos = 0;
2425   int i;
2426 
2427   order_pos = ipa_reverse_postorder (order);
2428   gcc_assert (order_pos == symtab->cgraph_count);
2429 
2430   /* The garbage collector may remove inline clones we eliminate during
2431      optimization, so we must be sure not to reference them.  */
2432   for (i = 0; i < order_pos; i++)
2433     if (order[i]->process)
2434       {
2435 	if (order[i]->tp_first_run
2436 	    && opt_for_fn (order[i]->decl, flag_profile_reorder_functions))
2437 	  tp_first_run_order[tp_first_run_order_pos++] = order[i];
2438 	else
2439           order[new_order_pos++] = order[i];
2440       }
2441 
2442   /* First output functions with a time profile, in the specified order.  */
2443   qsort (tp_first_run_order, tp_first_run_order_pos,
2444 	 sizeof (cgraph_node *), tp_first_run_node_cmp);
2445   for (i = 0; i < tp_first_run_order_pos; i++)
2446     {
2447       node = tp_first_run_order[i];
2448 
2449       if (node->process)
2450 	{
2451 	  expanded_func_count++;
2452 	  profiled_func_count++;
2453 
2454 	  if (symtab->dump_file)
2455 	    fprintf (symtab->dump_file,
2456 		     "Time profile order in expand_all_functions:%s:%d\n",
2457 		     node->dump_asm_name (), node->tp_first_run);
2458 	  node->process = 0;
2459 	  node->expand ();
2460 	}
2461     }
2462 
2463   /* Output functions in RPO so callees get optimized before callers.  This
2464      makes ipa-ra and other propagators work.
2465      FIXME: This is far from optimal code layout.  */
2466   for (i = new_order_pos - 1; i >= 0; i--)
2467     {
2468       node = order[i];
2469 
2470       if (node->process)
2471 	{
2472 	  expanded_func_count++;
2473 	  node->process = 0;
2474 	  node->expand ();
2475 	}
2476     }
2477 
2478   if (dump_file)
2479     fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2480 	     main_input_filename, profiled_func_count, expanded_func_count);
2481 
2482   if (symtab->dump_file && tp_first_run_order_pos)
2483     fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2484              profiled_func_count, expanded_func_count);
2485 
2486   symtab->process_new_functions ();
2487   free_gimplify_stack ();
2488   delete ipa_saved_clone_sources;
2489   ipa_saved_clone_sources = NULL;
2490   free (order);
2491 }
2492 
2493 /* This is used to sort the node types by the cgraph order number.  */
2494 
2495 enum cgraph_order_sort_kind
2496 {
2497   ORDER_FUNCTION,
2498   ORDER_VAR,
2499   ORDER_VAR_UNDEF,
2500   ORDER_ASM
2501 };
2502 
2503 struct cgraph_order_sort
2504 {
2505   /* Construct from a cgraph_node.  */
2506   cgraph_order_sort (cgraph_node *node)
2507   : kind (ORDER_FUNCTION), order (node->order)
2508   {
2509     u.f = node;
2510   }
2511 
2512   /* Construct from a varpool_node.  */
2513   cgraph_order_sort (varpool_node *node)
2514   : kind (node->definition ? ORDER_VAR : ORDER_VAR_UNDEF), order (node->order)
2515   {
2516     u.v = node;
2517   }
2518 
2519   /* Construct from an asm_node.  */
2520   cgraph_order_sort (asm_node *node)
2521   : kind (ORDER_ASM), order (node->order)
2522   {
2523     u.a = node;
2524   }
2525 
2526   /* Assemble the cgraph_order_sort entry according to its kind.  */
2527   void process ();
2528 
2529   enum cgraph_order_sort_kind kind;
2530   union
2531   {
2532     cgraph_node *f;
2533     varpool_node *v;
2534     asm_node *a;
2535   } u;
2536   int order;
2537 };
2538 
2539 /* Assemble the cgraph_order_sort entry according to its kind.  */
2540 
2541 void
2542 cgraph_order_sort::process ()
2543 {
2544   switch (kind)
2545     {
2546     case ORDER_FUNCTION:
2547       u.f->process = 0;
2548       u.f->expand ();
2549       break;
2550     case ORDER_VAR:
2551       u.v->assemble_decl ();
2552       break;
2553     case ORDER_VAR_UNDEF:
2554       assemble_undefined_decl (u.v->decl);
2555       break;
2556     case ORDER_ASM:
2557       assemble_asm (u.a->asm_str);
2558       break;
2559     default:
2560       gcc_unreachable ();
2561     }
2562 }
2563 
2564 /* Compare cgraph_order_sort by order.  */
2565 
2566 static int
2567 cgraph_order_cmp (const void *a_p, const void *b_p)
2568 {
2569   const cgraph_order_sort *nodea = (const cgraph_order_sort *)a_p;
2570   const cgraph_order_sort *nodeb = (const cgraph_order_sort *)b_p;
2571 
2572   return nodea->order - nodeb->order;
2573 }
2574 
2575 /* Output all functions, variables, and asm statements in the order
2576    according to their order fields, which is the order in which they
2577    appeared in the file.  This implements -fno-toplevel-reorder.  In
2578    this mode we may output functions and variables which don't really
2579    need to be output.  */
2580 
2581 static void
2582 output_in_order (void)
2583 {
2584   int i;
2585   cgraph_node *cnode;
2586   varpool_node *vnode;
2587   asm_node *anode;
2588   auto_vec<cgraph_order_sort> nodes;
2589   cgraph_order_sort *node;
2590 
2591   FOR_EACH_DEFINED_FUNCTION (cnode)
2592     if (cnode->process && !cnode->thunk.thunk_p
2593 	&& !cnode->alias && cnode->no_reorder)
2594       nodes.safe_push (cgraph_order_sort (cnode));
2595 
2596   /* There is a similar loop in symbol_table::output_variables.
2597      Please keep them in sync.  */
2598   FOR_EACH_VARIABLE (vnode)
2599     if (vnode->no_reorder
2600 	&& !DECL_HARD_REGISTER (vnode->decl)
2601 	&& !DECL_HAS_VALUE_EXPR_P (vnode->decl))
2602       nodes.safe_push (cgraph_order_sort (vnode));
2603 
2604   for (anode = symtab->first_asm_symbol (); anode; anode = anode->next)
2605     nodes.safe_push (cgraph_order_sort (anode));
2606 
2607   /* Sort nodes by order.  */
2608   nodes.qsort (cgraph_order_cmp);
2609 
2610   /* In toplevel reorder mode we output all statics; mark them as needed.  */
2611   FOR_EACH_VEC_ELT (nodes, i, node)
2612     if (node->kind == ORDER_VAR)
2613       node->u.v->finalize_named_section_flags ();
2614 
2615   FOR_EACH_VEC_ELT (nodes, i, node)
2616     node->process ();
2617 
2618   symtab->clear_asm_symbols ();
2619 }
2620 
2621 static void
2622 ipa_passes (void)
2623 {
2624   gcc::pass_manager *passes = g->get_passes ();
2625 
2626   set_cfun (NULL);
2627   current_function_decl = NULL;
2628   gimple_register_cfg_hooks ();
2629   bitmap_obstack_initialize (NULL);
2630 
2631   invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2632 
2633   if (!in_lto_p)
2634     {
2635       execute_ipa_pass_list (passes->all_small_ipa_passes);
2636       if (seen_error ())
2637 	return;
2638     }
2639 
2640   /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2641      devirtualization and other changes for which removal needs to iterate.  */
2642   symtab->remove_unreachable_nodes (symtab->dump_file);
2643 
2644   /* If pass_all_early_optimizations was not scheduled, the state of
2645      the cgraph will not be properly updated.  Update it now.  */
2646   if (symtab->state < IPA_SSA)
2647     symtab->state = IPA_SSA;
2648 
2649   if (!in_lto_p)
2650     {
2651       /* Generate coverage variables and constructors.  */
2652       coverage_finish ();
2653 
2654       /* Process new functions added.  */
2655       set_cfun (NULL);
2656       current_function_decl = NULL;
2657       symtab->process_new_functions ();
2658 
2659       execute_ipa_summary_passes
2660 	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2661     }
2662 
2663   /* Some targets need to handle LTO assembler output specially.  */
2664   if (flag_generate_lto || flag_generate_offload)
2665     targetm.asm_out.lto_start ();
2666 
2667   if (!in_lto_p
2668       || flag_incremental_link == INCREMENTAL_LINK_LTO)
2669     {
2670       if (!quiet_flag)
2671 	fprintf (stderr, "Streaming LTO\n");
2672       if (g->have_offload)
2673 	{
2674 	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2675 	  lto_stream_offload_p = true;
2676 	  ipa_write_summaries ();
2677 	  lto_stream_offload_p = false;
2678 	}
2679       if (flag_lto)
2680 	{
2681 	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
2682 	  lto_stream_offload_p = false;
2683 	  ipa_write_summaries ();
2684 	}
2685     }
2686 
2687   if (flag_generate_lto || flag_generate_offload)
2688     targetm.asm_out.lto_end ();
2689 
2690   if (!flag_ltrans
2691       && ((in_lto_p && flag_incremental_link != INCREMENTAL_LINK_LTO)
2692 	  || !flag_lto || flag_fat_lto_objects))
2693     execute_ipa_pass_list (passes->all_regular_ipa_passes);
2694   invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2695 
2696   bitmap_obstack_release (NULL);
2697 }
2698 
2699 
2700 /* Return the symbol name that DECL's "alias" attribute refers to.  */
2701 
2702 static tree
2703 get_alias_symbol (tree decl)
2704 {
2705   tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2706   return get_identifier (TREE_STRING_POINTER
2707 			  (TREE_VALUE (TREE_VALUE (alias))));
2708 }
2709 
2710 
2711 /* Weakrefs may be associated with external decls and thus not output
2712    at expansion time.  Emit all necessary aliases.  */
2713 
2714 void
2715 symbol_table::output_weakrefs (void)
2716 {
2717   symtab_node *node;
2718   FOR_EACH_SYMBOL (node)
2719     if (node->alias
2720         && !TREE_ASM_WRITTEN (node->decl)
2721 	&& node->weakref)
2722       {
2723 	tree target;
2724 
2725 	/* Weakrefs are special in that they do not require a target definition
2726 	   in the current compilation unit, so it is a bit hard to work out what
2727 	   we want to alias.
2728 	   When the alias target is defined, we need to fetch it from the symtab
2729 	   reference; otherwise it is pointed to by alias_target.  */
2730 	if (node->alias_target)
2731 	  target = (DECL_P (node->alias_target)
2732 		    ? DECL_ASSEMBLER_NAME (node->alias_target)
2733 		    : node->alias_target);
2734 	else if (node->analyzed)
2735 	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2736 	else
2737 	  {
2738 	    gcc_unreachable ();
2739 	    target = get_alias_symbol (node->decl);
2740 	  }
2741         do_assemble_alias (node->decl, target);
2742       }
2743 }
2744 
2745 /* Perform simple optimizations based on callgraph.  */
2746 
2747 void
2748 symbol_table::compile (void)
2749 {
2750   if (seen_error ())
2751     return;
2752 
2753   symtab_node::checking_verify_symtab_nodes ();
2754 
2755   timevar_push (TV_CGRAPHOPT);
2756   if (pre_ipa_mem_report)
2757     dump_memory_report ("Memory consumption before IPA");
2758   if (!quiet_flag)
2759     fprintf (stderr, "Performing interprocedural optimizations\n");
2760   state = IPA;
2761 
2762   /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
2763   if (flag_generate_lto || flag_generate_offload)
2764     lto_streamer_hooks_init ();
2765 
2766   /* Don't run the IPA passes if there was any error or sorry messages.  */
2767   if (!seen_error ())
2768   {
2769     timevar_start (TV_CGRAPH_IPA_PASSES);
2770     ipa_passes ();
2771     timevar_stop (TV_CGRAPH_IPA_PASSES);
2772   }
2773   /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
2774   if (seen_error ()
2775       || ((!in_lto_p || flag_incremental_link == INCREMENTAL_LINK_LTO)
2776 	  && flag_lto && !flag_fat_lto_objects))
2777     {
2778       timevar_pop (TV_CGRAPHOPT);
2779       return;
2780     }
2781 
2782   global_info_ready = true;
2783   if (dump_file)
2784     {
2785       fprintf (dump_file, "Optimized ");
2786       symtab->dump (dump_file);
2787     }
2788   if (post_ipa_mem_report)
2789     dump_memory_report ("Memory consumption after IPA");
2790   timevar_pop (TV_CGRAPHOPT);
2791 
2792   /* Output everything.  */
2793   switch_to_section (text_section);
2794   (*debug_hooks->assembly_start) ();
2795   if (!quiet_flag)
2796     fprintf (stderr, "Assembling functions:\n");
2797   symtab_node::checking_verify_symtab_nodes ();
2798 
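  /* Run the late IPA passes and then decide which functions must be written
     out.  */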
2799   bitmap_obstack_initialize (NULL);
2800   execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2801   bitmap_obstack_release (NULL);
2802   mark_functions_to_output ();
2803 
2804   /* When weakref support is missing, we automatically translate all
2805      references to NODE to references to its ultimate alias target.
2806      The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
2807      TREE_CHAIN.
2808 
2809      Set up this mapping before we output any assembler but once we are sure
2810      that all symbol renaming is done.
2811 
2812      FIXME: All this ugliness can go away if we just do renaming at gimple
2813      level by physically rewriting the IL.  At the moment we can only redirect
2814      calls, so we need infrastructure for renaming references as well.  */
2815 #ifndef ASM_OUTPUT_WEAKREF
2816   symtab_node *node;
2817 
2818   FOR_EACH_SYMBOL (node)
2819     if (node->alias
2820 	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2821       {
2822 	IDENTIFIER_TRANSPARENT_ALIAS
2823 	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2824 	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2825 	   = (node->alias_target ? node->alias_target
2826 	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2827       }
2828 #endif
2829 
2830   state = EXPANSION;
2831 
2832   /* Output first asm statements and anything ordered. The process
2833      flag is cleared for these nodes, so we skip them later.  */
2834   output_in_order ();
2835 
2836   timevar_start (TV_CGRAPH_FUNC_EXPANSION);
2837   expand_all_functions ();
2838   timevar_stop (TV_CGRAPH_FUNC_EXPANSION);
2839 
2840   output_variables ();
2841 
2842   process_new_functions ();
2843   state = FINISHED;
2844   output_weakrefs ();
2845 
2846   if (dump_file)
2847     {
2848       fprintf (dump_file, "\nFinal ");
2849       symtab->dump (dump_file);
2850     }
2851   if (!flag_checking)
2852     return;
2853   symtab_node::verify_symtab_nodes ();
2854   /* Double check that all inline clones are gone and that all
2855      function bodies have been released from memory.  */
2856   if (!seen_error ())
2857     {
2858       cgraph_node *node;
2859       bool error_found = false;
2860 
2861       FOR_EACH_DEFINED_FUNCTION (node)
2862 	if (node->inlined_to
2863 	    || gimple_has_body_p (node->decl))
2864 	  {
2865 	    error_found = true;
2866 	    node->debug ();
2867 	  }
2868       if (error_found)
2869 	internal_error ("nodes with unreleased memory found");
2870     }
2871 }
2872 
2873 /* Earlydebug dump file, flags, and number.  */
2874 
2875 static int debuginfo_early_dump_nr;
2876 static FILE *debuginfo_early_dump_file;
2877 static dump_flags_t debuginfo_early_dump_flags;
2878 
2879 /* Debug dump file, flags, and number.  */
2880 
2881 static int debuginfo_dump_nr;
2882 static FILE *debuginfo_dump_file;
2883 static dump_flags_t debuginfo_dump_flags;
2884 
2885 /* Register the debug and earlydebug dump files.  */
2886 
2887 void
2888 debuginfo_early_init (void)
2889 {
2890   gcc::dump_manager *dumps = g->get_dumps ();
2891   debuginfo_early_dump_nr = dumps->dump_register (".earlydebug", "earlydebug",
2892 						  "earlydebug", DK_tree,
2893 						  OPTGROUP_NONE,
2894 						  false);
2895   debuginfo_dump_nr = dumps->dump_register (".debug", "debug",
2896 					     "debug", DK_tree,
2897 					     OPTGROUP_NONE,
2898 					     false);
2899 }
2900 
2901 /* Initialize the debug and earlydebug dump files.  */
2902 
2903 void
2904 debuginfo_init (void)
2905 {
2906   gcc::dump_manager *dumps = g->get_dumps ();
2907   debuginfo_dump_file = dump_begin (debuginfo_dump_nr, NULL);
2908   debuginfo_dump_flags = dumps->get_dump_file_info (debuginfo_dump_nr)->pflags;
2909   debuginfo_early_dump_file = dump_begin (debuginfo_early_dump_nr, NULL);
2910   debuginfo_early_dump_flags
2911     = dumps->get_dump_file_info (debuginfo_early_dump_nr)->pflags;
2912 }
2913 
2914 /* Finalize the debug and earlydebug dump files.  */
2915 
2916 void
2917 debuginfo_fini (void)
2918 {
2919   if (debuginfo_dump_file)
2920     dump_end (debuginfo_dump_nr, debuginfo_dump_file);
2921   if (debuginfo_early_dump_file)
2922     dump_end (debuginfo_early_dump_nr, debuginfo_early_dump_file);
2923 }
2924 
2925 /* Set dump_file to the debug dump file.  */
2926 
2927 void
2928 debuginfo_start (void)
2929 {
2930   set_dump_file (debuginfo_dump_file);
2931 }
2932 
2933 /* Undo setting dump_file to the debug dump file.  */
2934 
2935 void
2936 debuginfo_stop (void)
2937 {
2938   set_dump_file (NULL);
2939 }
2940 
2941 /* Set dump_file to the earlydebug dump file.  */
2942 
2943 void
2944 debuginfo_early_start (void)
2945 {
2946   set_dump_file (debuginfo_early_dump_file);
2947 }
2948 
2949 /* Undo setting dump_file to the earlydebug dump file.  */
2950 
2951 void
2952 debuginfo_early_stop (void)
2953 {
2954   set_dump_file (NULL);
2955 }
2956 
2957 /* Analyze the whole compilation unit once it is parsed completely.  */
2958 
2959 void
2960 symbol_table::finalize_compilation_unit (void)
2961 {
2962   timevar_push (TV_CGRAPH);
2963 
2964   /* If we're here there's no current function anymore.  Some frontends
2965      are lazy in clearing these.  */
2966   current_function_decl = NULL;
2967   set_cfun (NULL);
2968 
2969   /* Do not skip analyzing the functions if there were errors; otherwise we
2970      would miss diagnostics for the following functions.  */
2971 
2972   /* Emit size functions we didn't inline.  */
2973   finalize_size_functions ();
2974 
2975   /* Mark alias targets necessary and emit diagnostics.  */
2976   handle_alias_pairs ();
2977 
2978   if (!quiet_flag)
2979     {
2980       fprintf (stderr, "\nAnalyzing compilation unit\n");
2981       fflush (stderr);
2982     }
2983 
2984   if (flag_dump_passes)
2985     dump_passes ();
2986 
2987   /* Gimplify and lower all functions, compute reachability and
2988      remove unreachable nodes.  */
2989   analyze_functions (/*first_time=*/true);
2990 
2991   /* Mark alias targets necessary and emit diagnostics.  */
2992   handle_alias_pairs ();
2993 
2994   /* Gimplify and lower thunks.  */
2995   analyze_functions (/*first_time=*/false);
2996 
2997   /* Offloading requires LTO infrastructure.  */
2998   if (!in_lto_p && g->have_offload)
2999     flag_generate_offload = 1;
3000 
3001   if (!seen_error ())
3002     {
3003       /* Emit early debug for reachable functions, and by consequence,
3004 	 locally scoped symbols.  */
3005       struct cgraph_node *cnode;
3006       FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
3007 	(*debug_hooks->early_global_decl) (cnode->decl);
3008 
3009       /* Clean up anything that needs cleaning up after initial debug
3010 	 generation.  */
3011       debuginfo_early_start ();
3012       (*debug_hooks->early_finish) (main_input_filename);
3013       debuginfo_early_stop ();
3014     }
3015 
3016   /* Finally drive the pass manager.  */
3017   compile ();
3018 
3019   timevar_pop (TV_CGRAPH);
3020 }
3021 
3022 /* Reset all state within cgraphunit.c so that we can rerun the compiler
3023    within the same process.  For use by toplev::finalize.  */
3024 
3025 void
3026 cgraphunit_c_finalize (void)
3027 {
3028   gcc_assert (cgraph_new_nodes.length () == 0);
3029   cgraph_new_nodes.truncate (0);
3030 
3031   vtable_entry_type = NULL;
3032   queued_nodes = &symtab_terminator;
3033 
3034   first_analyzed = NULL;
3035   first_analyzed_var = NULL;
3036 }
3037 
3038 /* Create a wrapper from this cgraph_node to the TARGET node.  A thunk is
3039    used for this kind of wrapper method.  */
3040 
3041 void
3042 cgraph_node::create_wrapper (cgraph_node *target)
3043 {
3044   /* Preserve DECL_RESULT so we get the right by-reference flag.  */
3045   tree decl_result = DECL_RESULT (decl);
3046 
3047   /* Remove the function's body but keep the arguments so they can be
3048      reused for the thunk.  */
3049   release_body (true);
3050   reset ();
3051 
3052   DECL_UNINLINABLE (decl) = false;
3053   DECL_RESULT (decl) = decl_result;
3054   DECL_INITIAL (decl) = NULL;
3055   allocate_struct_function (decl, false);
3056   set_cfun (NULL);
3057 
3058   /* Turn alias into thunk and expand it into GIMPLE representation.  */
3059   definition = true;
3060 
3061   memset (&thunk, 0, sizeof (cgraph_thunk_info));
3062   thunk.thunk_p = true;
3063   create_edge (target, NULL, count);
3064   callees->can_throw_external = !TREE_NOTHROW (target->decl);
3065 
3066   tree arguments = DECL_ARGUMENTS (decl);
3067 
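  /* The wrapper only forwards its arguments to TARGET, so they no longer
     need to be addressable.  */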
3068   while (arguments)
3069     {
3070       TREE_ADDRESSABLE (arguments) = false;
3071       arguments = TREE_CHAIN (arguments);
3072     }
3073 
3074   expand_thunk (false, true);
3075 
3076   /* Inline summary set-up.  */
3077   analyze ();
3078   inline_analyze_function (this);
3079 }
3080 
3081 #include "gt-cgraphunit.h"
3082