/* Callgraph transformations to handle inlining
   Copyright (C) 2003-2020 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The inline decisions are stored in the callgraph as an "inline plan"
   and applied later.

   To mark a given call inlined, use the inline_call function.  It marks
   the edge as inlined and, if necessary, produces a virtual clone in the
   callgraph representing the new copy of the callee's function body.

   The inline plan is applied to a given function body by
   inline_transform.  */

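/* Illustrative sketch of the overall flow (not a precise call sequence):
   the inliner heuristics record decisions by calling inline_call on
   selected edges; later, when each function body is compiled,
   inline_transform replays the recorded plan by redirecting the call
   statements and letting optimize_inline_calls copy the bodies in.  */
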
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "function.h"
#include "tree.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "function.h"
#include "cfg.h"
#include "basic-block.h"
#include "ipa-utils.h"

int ncalls_inlined;
int nfunctions_inlined;

/* Scale counts of NODE edges by NUM/DEN.  */

static void
update_noncloned_counts (struct cgraph_node *node,
			 profile_count num, profile_count den)
{
  struct cgraph_edge *e;

  profile_count::adjust_for_ipa_scaling (&num, &den);

  for (e = node->callees; e; e = e->next_callee)
    {
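      /* An edge whose inline_failed is cleared has been inlined into
	 NODE; its callee's body is part of NODE, so scale its counts
	 recursively as well.  */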
      if (!e->inline_failed)
        update_noncloned_counts (e->callee, num, den);
      e->count = e->count.apply_scale (num, den);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    e->count = e->count.apply_scale (num, den);
  node->count = node->count.apply_scale (num, den);
}

/* We removed or are going to remove the last call to NODE.
   Return true if we can and want to proactively remove NODE now.
   This is important, since we want the inliner to know when the
   offline copy of a function was removed.  */

static bool
can_remove_node_now_p_1 (struct cgraph_node *node, struct cgraph_edge *e)
{
  ipa_ref *ref;

  FOR_EACH_ALIAS (node, ref)
    {
      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      if ((alias->callers && alias->callers != e)
          || !can_remove_node_now_p_1 (alias, e))
	return false;
    }
  /* FIXME: When the address of a DECL_EXTERNAL function is taken, we can
     still remove its offline copy, but we would need to keep an unanalyzed
     node in the callgraph so references can point to it.

     Also for a comdat group we can ignore references inside the group, as we
     want to prove the group as a whole to be dead.  */
  return (!node->address_taken
	  && node->can_remove_if_no_direct_calls_and_refs_p ()
	  /* Inlining might enable more devirtualization, so we want to remove
	     those nodes only after all devirtualizable virtual calls are
	     processed.  Lacking "may" edges in the callgraph, we just preserve
	     them past inlining.  */
	  && (!DECL_VIRTUAL_P (node->decl)
	      || !opt_for_fn (node->decl, flag_devirtualize))
	  /* During early inlining some unanalyzed cgraph nodes might be in the
	     callgraph and they might refer to the function in question.  */
	  && !cgraph_new_nodes.exists ());
}

/* We are going to eliminate the last direct call to NODE (or an alias of it)
   via edge E.  Verify that NODE can be removed from the unit, and that if it
   is contained in a comdat group, the whole comdat group is removable.  */

static bool
can_remove_node_now_p (struct cgraph_node *node, struct cgraph_edge *e)
{
  struct cgraph_node *next;
  if (!can_remove_node_now_p_1 (node, e))
    return false;

  /* When we see the same comdat group, we need to be sure that all
     items can be removed.  */
  if (!node->same_comdat_group || !node->externally_visible)
    return true;
  for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
       next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
    {
      if (next->alias)
	continue;
      if ((next->callers && next->callers != e)
	  || !can_remove_node_now_p_1 (next, e))
        return false;
    }
  return true;
}

/* Return true if NODE is a master clone with non-inline clones.  */

static bool
master_clone_with_noninline_clones_p (struct cgraph_node *node)
{
  if (node->clone_of)
    return false;

  for (struct cgraph_node *n = node->clones; n; n = n->next_sibling_clone)
    if (n->decl != node->decl)
      return true;

  return false;
}

/* E is expected to be an edge being inlined.  Clone the destination node of
   the edge and redirect it to the new clone.
   DUPLICATE is used for bookkeeping on whether we are actually creating new
   clones or re-using the node originally representing the out-of-line
   function call.
   By default the offline copy is removed when it appears dead after inlining;
   UPDATE_ORIGINAL prevents this transformation.
   If OVERALL_SIZE is non-NULL, the size is updated to reflect the
   transformation.  */

void
clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
		     bool update_original, int *overall_size)
{
  struct cgraph_node *inlining_into;
  struct cgraph_edge *next;

  if (e->caller->inlined_to)
    inlining_into = e->caller->inlined_to;
  else
    inlining_into = e->caller;

  if (duplicate)
    {
      /* We may eliminate the need for the out-of-line copy to be output.
	 In that case just go ahead and re-use it.  This is not just a
	 memory optimization.  Making the offline copy of the function
	 disappear from the program will improve future inlining decisions.  */
      if (!e->callee->callers->next_caller
	  /* Recursive inlining never wants the master clone to
	     be overwritten.  */
	  && update_original
	  && can_remove_node_now_p (e->callee, e)
	  /* We cannot overwrite a master clone with non-inline clones
	     until after these clones are materialized.  */
	  && !master_clone_with_noninline_clones_p (e->callee))
	{
	  /* TODO: When the callee is in a comdat group, we could remove all
	     of it, including all inline clones inlined into it.  That would
	     however need small function inlining to register an edge removal
	     hook to maintain the priority queue.

	     For now we keep the other functions in the group in the program
	     until cgraph_remove_unreachable_functions gets rid of them.  */
	  gcc_assert (!e->callee->inlined_to);
	  e->callee->remove_from_same_comdat_group ();
	  if (e->callee->definition
	      && inline_account_function_p (e->callee))
	    {
	      gcc_assert (!e->callee->alias);
	      if (overall_size)
		*overall_size -= ipa_size_summaries->get (e->callee)->size;
	      nfunctions_inlined++;
	    }
	  duplicate = false;
	  e->callee->externally_visible = false;
          update_noncloned_counts (e->callee, e->count, e->callee->count);

	  dump_callgraph_transformation (e->callee, inlining_into,
					 "inlining to");
	}
      else
	{
	  struct cgraph_node *n;

	  n = e->callee->create_clone (e->callee->decl,
				       e->count,
				       update_original, vNULL, true,
				       inlining_into,
				       NULL);
	  n->used_as_abstract_origin = e->callee->used_as_abstract_origin;
	  e->redirect_callee (n);
	}
    }
  else
    e->callee->remove_from_same_comdat_group ();

  e->callee->inlined_to = inlining_into;

  /* Recursively clone all bodies.  */
  for (e = e->callee->callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
        clone_inlined_nodes (e, duplicate, update_original, overall_size);
    }
}

/* Check all speculations in N and if any seem useless, resolve them.  When
   the first edge is resolved, pop all edges from NEW_EDGES and insert them
   into EDGE_SET.  Then remove each resolved edge from EDGE_SET, if it is
   there.  */

static bool
check_speculations_1 (cgraph_node *n, vec<cgraph_edge *> *new_edges,
		      hash_set <cgraph_edge *> *edge_set)
{
  bool speculation_removed = false;
  cgraph_edge *next;

  for (cgraph_edge *e = n->callees; e; e = next)
    {
      next = e->next_callee;
      if (e->speculative && !speculation_useful_p (e, true))
	{
	  while (new_edges && !new_edges->is_empty ())
	    edge_set->add (new_edges->pop ());
	  edge_set->remove (e);

	  cgraph_edge::resolve_speculation (e, NULL);
	  speculation_removed = true;
	}
      else if (!e->inline_failed)
	speculation_removed |= check_speculations_1 (e->callee, new_edges,
						     edge_set);
    }
  return speculation_removed;
}

/* Push E to NEW_EDGES.  Called from hash_set traverse method, which
   unfortunately means this function has to have external linkage, otherwise
   the code will not compile with gcc 4.8.  */

bool
push_all_edges_in_set_to_vec (cgraph_edge * const &e,
			      vec<cgraph_edge *> *new_edges)
{
  new_edges->safe_push (e);
  return true;
}

/* Check all speculations in N and if any seem useless, resolve them and
   remove them from NEW_EDGES.  */

static bool
check_speculations (cgraph_node *n, vec<cgraph_edge *> *new_edges)
{
  hash_set <cgraph_edge *> edge_set;
  bool res = check_speculations_1 (n, new_edges, &edge_set);
  if (!edge_set.is_empty ())
    edge_set.traverse <vec<cgraph_edge *> *,
		       push_all_edges_in_set_to_vec> (new_edges);
  return res;
}

/* Mark all call graph edges coming out of NODE and all nodes that have been
   inlined to it as in_polymorphic_cdtor.  */

static void
mark_all_inlined_calls_cdtor (cgraph_node *node)
{
  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      cs->in_polymorphic_cdtor = true;
      if (!cs->inline_failed)
	mark_all_inlined_calls_cdtor (cs->callee);
    }
  for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    cs->in_polymorphic_cdtor = true;
}


/* Mark edge E as inlined and update the callgraph accordingly.
   UPDATE_ORIGINAL specifies whether the profile of the original function
   should be updated.  If any new indirect edges are discovered in the
   process, add them to NEW_EDGES, unless it is NULL.  If
   UPDATE_OVERALL_SUMMARY is false, do not bother to recompute the overall
   size of the caller after inlining; the caller is then required to
   eventually do it via ipa_update_overall_fn_summary.
   If CALLEE_REMOVED is non-NULL, set it to true if we removed the callee
   node.

   Return true iff any new callgraph edges were discovered as a
   result of inlining.  */

bool
inline_call (struct cgraph_edge *e, bool update_original,
	     vec<cgraph_edge *> *new_edges,
	     int *overall_size, bool update_overall_summary,
	     bool *callee_removed)
{
  int old_size = 0, new_size = 0;
  struct cgraph_node *to = NULL;
  struct cgraph_edge *curr = e;
  bool comdat_local = e->callee->comdat_local_p ();
  struct cgraph_node *callee = e->callee->ultimate_alias_target ();
  bool new_edges_found = false;

  int estimated_growth = 0;
  if (! update_overall_summary)
    estimated_growth = estimate_edge_growth (e);
  /* This is used only for the assert below.  */
#if 0
  bool predicated = inline_edge_summary (e)->predicate != NULL;
#endif

  /* Don't inline inlined edges.  */
  gcc_assert (e->inline_failed);
  /* Don't even think of inlining an inline clone.  */
  gcc_assert (!callee->inlined_to);

  to = e->caller;
  if (to->inlined_to)
    to = to->inlined_to;
  if (to->thunk.thunk_p)
    {
      struct cgraph_node *target = to->callees->callee;
      thunk_expansion = true;
      symtab->call_cgraph_removal_hooks (to);
      if (in_lto_p)
	to->get_untransformed_body ();
      to->expand_thunk (false, true);
      /* When the thunk is instrumented we may have multiple callees.  */
      for (e = to->callees; e && e->callee != target; e = e->next_callee)
	;
      symtab->call_cgraph_insertion_hooks (to);
      thunk_expansion = false;
      gcc_assert (e);
    }


  e->inline_failed = CIF_OK;
  DECL_POSSIBLY_INLINED (callee->decl) = true;

  if (DECL_FUNCTION_PERSONALITY (callee->decl))
    DECL_FUNCTION_PERSONALITY (to->decl)
      = DECL_FUNCTION_PERSONALITY (callee->decl);

  bool reload_optimization_node = false;
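  /* If the callee was not compiled with -fstrict-aliasing but the caller
     was, the inlined body may not honor the caller's strict-aliasing
     assumptions, so drop the flag on the caller.  */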
  if (!opt_for_fn (callee->decl, flag_strict_aliasing)
      && opt_for_fn (to->decl, flag_strict_aliasing))
    {
      struct gcc_options opts = global_options;

      cl_optimization_restore (&opts, opts_for_fn (to->decl));
      opts.x_flag_strict_aliasing = false;
      if (dump_file)
	fprintf (dump_file, "Dropping flag_strict_aliasing on %s\n",
		 to->dump_name ());
      DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
	 = build_optimization_node (&opts);
      reload_optimization_node = true;
    }

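  /* If the callee contains floating point expressions, record that in the
     caller's summary, and if the FP semantics flags of caller and callee
     differ, copy the callee's settings so the inlined FP code keeps the
     behavior it was compiled for.  */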
  ipa_fn_summary *caller_info = ipa_fn_summaries->get (to);
  ipa_fn_summary *callee_info = ipa_fn_summaries->get (callee);
  if (!caller_info->fp_expressions && callee_info->fp_expressions)
    {
      caller_info->fp_expressions = true;
      if (opt_for_fn (callee->decl, flag_rounding_math)
	  != opt_for_fn (to->decl, flag_rounding_math)
	  || opt_for_fn (callee->decl, flag_trapping_math)
	     != opt_for_fn (to->decl, flag_trapping_math)
	  || opt_for_fn (callee->decl, flag_unsafe_math_optimizations)
	     != opt_for_fn (to->decl, flag_unsafe_math_optimizations)
	  || opt_for_fn (callee->decl, flag_finite_math_only)
	     != opt_for_fn (to->decl, flag_finite_math_only)
	  || opt_for_fn (callee->decl, flag_signaling_nans)
	     != opt_for_fn (to->decl, flag_signaling_nans)
	  || opt_for_fn (callee->decl, flag_cx_limited_range)
	     != opt_for_fn (to->decl, flag_cx_limited_range)
	  || opt_for_fn (callee->decl, flag_signed_zeros)
	     != opt_for_fn (to->decl, flag_signed_zeros)
	  || opt_for_fn (callee->decl, flag_associative_math)
	     != opt_for_fn (to->decl, flag_associative_math)
	  || opt_for_fn (callee->decl, flag_reciprocal_math)
	     != opt_for_fn (to->decl, flag_reciprocal_math)
	  || opt_for_fn (callee->decl, flag_fp_int_builtin_inexact)
	     != opt_for_fn (to->decl, flag_fp_int_builtin_inexact)
	  || opt_for_fn (callee->decl, flag_errno_math)
	     != opt_for_fn (to->decl, flag_errno_math))
	{
	  struct gcc_options opts = global_options;

	  cl_optimization_restore (&opts, opts_for_fn (to->decl));
	  opts.x_flag_rounding_math
	    = opt_for_fn (callee->decl, flag_rounding_math);
	  opts.x_flag_trapping_math
	    = opt_for_fn (callee->decl, flag_trapping_math);
	  opts.x_flag_unsafe_math_optimizations
	    = opt_for_fn (callee->decl, flag_unsafe_math_optimizations);
	  opts.x_flag_finite_math_only
	    = opt_for_fn (callee->decl, flag_finite_math_only);
	  opts.x_flag_signaling_nans
	    = opt_for_fn (callee->decl, flag_signaling_nans);
	  opts.x_flag_cx_limited_range
	    = opt_for_fn (callee->decl, flag_cx_limited_range);
	  opts.x_flag_signed_zeros
	    = opt_for_fn (callee->decl, flag_signed_zeros);
	  opts.x_flag_associative_math
	    = opt_for_fn (callee->decl, flag_associative_math);
	  opts.x_flag_reciprocal_math
	    = opt_for_fn (callee->decl, flag_reciprocal_math);
	  opts.x_flag_fp_int_builtin_inexact
	    = opt_for_fn (callee->decl, flag_fp_int_builtin_inexact);
	  opts.x_flag_errno_math
	    = opt_for_fn (callee->decl, flag_errno_math);
	  if (dump_file)
	    fprintf (dump_file, "Copying FP flags from %s to %s\n",
		     callee->dump_name (), to->dump_name ());
	  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
	     = build_optimization_node (&opts);
	  reload_optimization_node = true;
	}
    }

  /* Reload global optimization flags.  */
  if (reload_optimization_node && DECL_STRUCT_FUNCTION (to->decl) == cfun)
    set_cfun (cfun, true);

  /* If aliases are involved, redirect the edge to the actual destination and
     possibly remove the aliases.  */
  if (e->callee != callee)
    {
      struct cgraph_node *alias = e->callee, *next_alias;
      e->redirect_callee (callee);
      while (alias && alias != callee)
	{
	  if (!alias->callers
	      && can_remove_node_now_p (alias,
					!e->next_caller && !e->prev_caller ? e : NULL))
	    {
	      next_alias = alias->get_alias_target ();
	      alias->remove ();
	      if (callee_removed)
		*callee_removed = true;
	      alias = next_alias;
	    }
	  else
	    break;
	}
    }

  clone_inlined_nodes (e, true, update_original, overall_size);

  gcc_assert (curr->callee->inlined_to == to);

  old_size = ipa_size_summaries->get (to)->size;
  ipa_merge_fn_summary_after_inlining (e);
  if (e->in_polymorphic_cdtor)
    mark_all_inlined_calls_cdtor (e->callee);
  if (opt_for_fn (e->caller->decl, optimize))
    new_edges_found = ipa_propagate_indirect_call_infos (curr, new_edges);
  bool removed_p = check_speculations (e->callee, new_edges);
  if (update_overall_summary)
    ipa_update_overall_fn_summary (to, new_edges_found || removed_p);
  else
    /* Update the size by the estimate so that overall function growth
       limits keep working for further inlining into this function.
       Before we inline into this function again, the caller is expected
       to update the overall summary.  */
    ipa_size_summaries->get (to)->size += estimated_growth;
  new_size = ipa_size_summaries->get (to)->size;

  if (callee->calls_comdat_local)
    to->calls_comdat_local = true;
  else if (to->calls_comdat_local && comdat_local)
    to->calls_comdat_local = to->check_calls_comdat_local_p ();

  /* FIXME: This assert suffers from roundoff errors, disable it for GCC 5
     and revisit it after conversion to sreals in GCC 6.
     See PR 65654.  */
#if 0
  /* Verify that the estimated growth matches the real growth.  Allow an
     off-by-one error due to ipa_fn_summary::size_scale roundoff errors.  */
  gcc_assert (!update_overall_summary || !overall_size || new_edges_found
	      || abs (estimated_growth - (new_size - old_size)) <= 1
	      || speculation_removed
	      /* FIXME: a hack.  Edges with a false predicate are accounted
		 wrong; we should remove them from the callgraph.  */
	      || predicated);
#endif

  /* Account for the change of overall unit size; external functions will be
     removed and are thus not accounted for.  */
  if (overall_size && inline_account_function_p (to))
    *overall_size += new_size - old_size;
  ncalls_inlined++;

  /* This must happen after ipa_merge_fn_summary_after_inlining, which relies
     on the jump functions of the callee not being updated.  */
  return new_edges_found;
}

/* For each node that was made the holder of a function body by
   save_inline_function_body, this summary contains a pointer to the previous
   holder of the body.  */

function_summary <tree *> *ipa_saved_clone_sources;

/* Copy the function body of NODE and redirect all inline clones to it.
   This is done before the inline plan is applied to NODE when there are
   still some inline clones of it.

   This is necessary because inline decisions are not really transitive
   and the other inline clones may have different bodies.  */

static struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  if (dump_file)
    fprintf (dump_file, "\nSaving body of %s for later reuse\n",
	     node->dump_name ());

  gcc_assert (node == cgraph_node::get (node->decl));

  /* first_clone will be turned into a real function.  */
  first_clone = node->clones;

  /* Arrange for the first clone not to be a thunk, as thunks do not have
     bodies.  */
  if (first_clone->thunk.thunk_p)
    {
      while (first_clone->thunk.thunk_p)
        first_clone = first_clone->next_sibling_clone;
      first_clone->prev_sibling_clone->next_sibling_clone
	= first_clone->next_sibling_clone;
      if (first_clone->next_sibling_clone)
	first_clone->next_sibling_clone->prev_sibling_clone
	   = first_clone->prev_sibling_clone;
      first_clone->next_sibling_clone = node->clones;
      first_clone->prev_sibling_clone = NULL;
      node->clones->prev_sibling_clone = first_clone;
      node->clones = first_clone;
    }
  first_clone->decl = copy_node (node->decl);
  first_clone->decl->decl_with_vis.symtab_node = first_clone;
  gcc_assert (first_clone == cgraph_node::get (first_clone->decl));

  /* Now reshape the clone tree, so that all other clones descend from
     first_clone.  */
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone;
	   n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }

  tree prev_body_holder = node->decl;
  if (!ipa_saved_clone_sources)
    ipa_saved_clone_sources = new function_summary <tree *> (symtab);
  else
    {
      tree *p = ipa_saved_clone_sources->get (node);
      if (p)
	{
	  prev_body_holder = *p;
	  gcc_assert (prev_body_holder);
	}
    }
  *ipa_saved_clone_sources->get_create (first_clone) = prev_body_holder;
  first_clone->former_clone_of
    = node->former_clone_of ? node->former_clone_of : node->decl;
  first_clone->clone_of = NULL;

  /* Now the node in question has no clones.  */
  node->clones = NULL;

  /* Inline clones share decl with the function they are cloned
     from.  Walk the whole clone tree and redirect them all to the
     new decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
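	/* Walk the clone tree in pre-order: descend into clones first,
	   then advance to the next sibling, backing up through clone_of
	   when a subtree is exhausted.  */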
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl,
			    NULL, NULL, true, NULL, NULL);

  /* The function will be short lived and removed after we inline all the
     clones, but make it internal so we won't confuse ourselves.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  first_clone->ipa_transforms_to_apply.release ();

  /* When doing recursive inlining, the clone may become unnecessary.
     This is possible e.g. when the recursive function is proved to be
     non-throwing and the recursion happens only in the EH landing pad.
     We cannot remove the clone until we are done with saving the body.
     Remove it now.  */
  if (!first_clone->callers)
    {
      first_clone->remove_symbol_and_inline_clones ();
      first_clone = NULL;
    }
  else if (flag_checking)
    first_clone->verify ();

  return first_clone;
}

/* Return true when the function body of NODE still needs to be kept around
   for later re-use.  */
static bool
preserve_function_body_p (struct cgraph_node *node)
{
  gcc_assert (symtab->global_info_ready);
  gcc_assert (!node->alias && !node->thunk.thunk_p);

  /* Look if there is any non-thunk clone around.  */
  for (node = node->clones; node; node = node->next_sibling_clone)
    if (!node->thunk.thunk_p)
      return true;
  return false;
}

/* Apply the inline plan to the function.  */

unsigned int
inline_transform (struct cgraph_node *node)
{
  unsigned int todo = 0;
  struct cgraph_edge *e, *next;
  bool has_inline = false;

  /* FIXME: Currently the pass manager is adding inline transform more than
     once to some clones.  This needs revisiting after WPA cleanups.  */
  if (cfun->after_inlining)
    return 0;

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (preserve_function_body_p (node))
    save_inline_function_body (node);

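  /* If the IPA profile count recorded on the node differs from the count of
     the entry block of the body we are about to transform, rescale all basic
     block counts so that the body matches the expected profile.  */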
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  bool scale = num.initialized_p () && !(num == den);
  if (scale)
    {
      profile_count::adjust_for_ipa_scaling (&num, &den);
      if (dump_file)
	{
	  fprintf (dump_file, "Applying count scale ");
	  num.dump (dump_file);
	  fprintf (dump_file, "/");
	  den.dump (dump_file);
	  fprintf (dump_file, "\n");
	}

      basic_block bb;
      cfun->cfg->count_max = profile_count::uninitialized ();
      FOR_ALL_BB_FN (bb, cfun)
	{
	  bb->count = bb->count.apply_scale (num, den);
	  cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
	}
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
    }

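  /* Redirect every call statement to the callee chosen by the inline plan
     and remember whether any of the edges was actually inlined.  */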
  for (e = node->callees; e; e = next)
    {
      if (!e->inline_failed)
	has_inline = true;
      next = e->next_callee;
      cgraph_edge::redirect_call_stmt_to_callee (e);
    }
  node->remove_all_references ();

  timevar_push (TV_INTEGRATION);
  if (node->callees && (opt_for_fn (node->decl, optimize) || has_inline))
    {
      todo = optimize_inline_calls (current_function_decl);
    }
  timevar_pop (TV_INTEGRATION);

  cfun->always_inline_functions_inlined = true;
  cfun->after_inlining = true;
  todo |= execute_fixup_cfg ();

  if (!(todo & TODO_update_ssa_any))
    /* Redirecting edges might lead to a need for vops to be recomputed.  */
    todo |= TODO_update_ssa_only_virtuals;

  return todo;
}