/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "splay-tree.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-gimple.h"

/* Inlining, Saving, Cloning

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Saving: make a semantically-identical copy of the function body.
   Necessary when we want to generate code for the body (a destructive
   operation), but we expect to need this body in the future (e.g. for
   inlining into another function).

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're saving or cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */
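
/* As a rough sketch (not a literal dump of inliner output): inlining

       int square (int n) { return n * n; }

   at the call 'y = square (x);' remaps the PARM_DECL n to a fresh
   local initialized from x, and rewrites the RETURN_EXPR into an
   assignment to the return variable, roughly

       n.0 = x; retval.1 = n.0 * n.0; y = retval.1;

   where n.0 and retval.1 are hypothetical names for the copies.  */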

/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Data required for function inlining.  */

typedef struct inline_data
{
  /* FUNCTION_DECL for function being inlined.  */
  tree callee;
  /* FUNCTION_DECL for function being inlined into.  */
  tree caller;
  /* struct function for function being inlined.  Usually this is the same
     as DECL_STRUCT_FUNCTION (callee), but can be different if saved_cfg
     and saved_eh are in use.  */
  struct function *callee_cfun;
  /* The VAR_DECL for the return value.  */
  tree retvar;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* We use the same mechanism to build clones that we do to perform
     inlining.  However, there are a few places where we need to
     distinguish between those two situations.  This flag is true if
     we are cloning, rather than inlining.  */
  bool cloning_p;
  /* Similarly for saving function body.  */
  bool saving_p;
  /* Versioning a function is slightly different from inlining.  */
  bool versioning_p;
  /* Callgraph node of function we are inlining into.  */
  struct cgraph_node *node;
  /* Callgraph node of currently inlined function.  */
  struct cgraph_node *current_node;
  /* Current BLOCK.  */
  tree block;
  /* Parameter replacement information from ipa-prop, used when
     versioning.  */
  varray_type ipa_info;
  /* Exception region the inlined call lies in.  */
  int eh_region;
  /* Take region number in the function being copied, add this value and
     get eh region number of the duplicate in the function we inline into.  */
  int eh_region_offset;
} inline_data;

/* Prototypes.  */

static tree declare_return_variable (inline_data *, tree, tree, tree *);
static tree copy_body_r (tree *, int *, void *);
static tree copy_generic_body (inline_data *);
static bool inlinable_function_p (tree);
static tree remap_decl (tree, inline_data *);
static tree remap_type (tree, inline_data *);
static void remap_block (tree *, inline_data *);
static tree remap_decls (tree, inline_data *);
static void copy_bind_expr (tree *, int *, inline_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static bool replace_ref_tree (inline_data *, tree *);
static inline bool inlining_p (inline_data *);
static void add_lexical_block (tree current_block, tree new_block);

/* Insert a tree->tree mapping into ID's decl_map.  Although the name
   suggests that the trees should be variables, this map is used for
   more than that.  */

static void
insert_decl_map (inline_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
		     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
		       (splay_tree_value) value);
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

static tree
remap_decl (tree decl, inline_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->callee;

  /* See if we have remapped this declaration.  */

  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t;
      t = copy_decl_for_dup (decl, fn, id->caller, id->versioning_p);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
	}

#if 0
      /* FIXME handle anon aggrs.  */
      if (! DECL_NAME (t) && TREE_TYPE (t)
	  && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
	{
	  /* For a VAR_DECL of anonymous type, we must also copy the
	     member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS.  */
	  tree members = NULL;
	  tree src;

	  for (src = DECL_ANON_UNION_ELEMS (t); src;
	       src = TREE_CHAIN (src))
	    {
	      tree member = remap_decl (TREE_VALUE (src), id);

	      gcc_assert (!TREE_PURPOSE (src));
	      members = tree_cons (NULL, member, members);
	    }
	  DECL_ANON_UNION_ELEMS (t) = nreverse (members);
	}
#endif

      /* Remember it, so that if we encounter this local entity
	 again we can reuse this copy.  */
      insert_decl_map (id, decl, t);
      return t;
    }

  return unshare_expr ((tree) n->value);
}

static tree
remap_type_1 (tree type, inline_data *id)
{
  tree new, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					 TYPE_MODE (type),
					 TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
					    TYPE_MODE (type),
					    TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new;
	    TREE_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* This type should not have been considered variably modified.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}

static tree
remap_type (tree type, inline_data *id)
{
  splay_tree_node node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->callee))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}
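
/* For instance (a sketch): in

       void f (int n) { char buf[n]; ... }

   the type of buf is variably modified because its size depends on
   the PARM_DECL n, so when f is inlined remap_type must build a new
   ARRAY_TYPE whose size refers to the remapped copy of n.  An
   ordinary type like 'int' is returned unchanged, after an identity
   mapping is inserted so the check is not repeated.  */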

static tree
remap_decls (tree decls, inline_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* We cannot duplicate declarations that are not automatic in the
	 callee (e.g. function-local statics) without breaking the
	 one-declaration rule, so don't remap them; just link the
	 original declarations into the caller's unexpanded_var_list.  */
      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->callee)
	  && !DECL_EXTERNAL (old_var))
	{
	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
						 cfun->unexpanded_var_list);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
	;
      else
	{
	  gcc_assert (DECL_P (new_var));
	  TREE_CHAIN (new_var) = new_decls;
	  new_decls = new_var;
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, inline_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->caller;
  if (id->cloning_p)
    /* We're building a clone; DECL_INITIAL is still
       error_mark_node, and current_binding_level is the parm
       binding level.  */
    lang_hooks.decls.insert_block (new_block);
  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, inline_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}

/* Callback for walk_tree used when copying a function body; see
   copy_generic_body and copy_bb.  DATA is really an `inline_data *'.  */

static tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  tree fn = id->callee;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* If this is a RETURN_EXPR, replace it with (a copy of) the
     MODIFY_EXPR hanging underneath, or delete it entirely if it
     returns no value.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && inlining_p (id))
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (void *)1;
	}
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->callee))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (lang_hooks.tree_inlining.auto_var_in_fn_p
	      (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      value = (tree) n->value;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
		{
		  *tp = build_empty_stmt ();
		  return copy_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF
	       && !id->versioning_p)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
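	  /* E.g. (a sketch): after inlining f (&x), a use of *p in the
	     callee becomes *&x once the parameter is substituted, and
	     the code below folds it back to plain x.  */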
	  tree decl = TREE_OPERAND (*tp, 0);
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      tree new;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about the type they point to.  In that case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on the stripping.  As
		 fold_indirect_ref does other useful transformations,
		 try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
	      new = unshare_expr ((tree)n->value);
	      old = *tp;
	      *tp = fold_indirect_ref_1 (type, new);
	      if (! *tp)
	        {
		  if (TREE_CODE (new) == ADDR_EXPR)
		    *tp = TREE_OPERAND (new, 0);
	          else
		    {
	              *tp = build1 (INDIRECT_REF, type, new);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, id->versioning_p ? data : NULL);

      /* If EXPR has a block defined, map it to the newly constructed
	 block.  When inlining, we want EXPRs without a block to appear
	 in the block of the function call.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
	{
	  new_block = id->block;
	  if (TREE_BLOCK (*tp))
	    {
	      splay_tree_node n;
	      n = splay_tree_lookup (id->decl_map,
				     (splay_tree_key) TREE_BLOCK (*tp));
	      gcc_assert (n);
	      new_block = (tree) n->value;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
	TREE_OPERAND (*tp, 0) =
	  build_int_cst
	    (NULL_TREE,
	     id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple; consider the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invarant_for_addr_expr (*tp);
	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copy basic block, scale profile accordingly.  Edges will be taken
   care of later.  */

static basic_block
copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
  copy_basic_block->frequency = (bb->frequency
				     * frequency_scale / REG_BR_PROB_BASE);
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* A RETURN_EXPR might have been removed; this is signalled by
	 the stmt pointer being NULL.  */
      if (stmt)
	{
	  tree call, decl;

	  /* With return slot optimization we can end up with
	     non-gimple (foo *)&this->m; fix that here.  */
	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
	      && !is_gimple_val (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0)))
	    gimplify_stmt (&stmt);

	  bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
	  call = get_call_expr_in (stmt);
	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (call && (decl = get_callee_fndecl (call)))
	    {
	      if (id->saving_p)
		{
		  struct cgraph_node *node;
		  struct cgraph_edge *edge;

		  /* We're saving a copy of the body, so we'll update the
		     callgraph nodes in place.  Note that we avoid
		     altering the original callgraph node; we begin with
		     the first clone.  */
		  for (node = id->node->next_clone;
		       node;
		       node = node->next_clone)
		    {
		      edge = cgraph_edge (node, orig_stmt);
		      gcc_assert (edge);
		      edge->call_stmt = stmt;
		    }
		}
	      else
		{
		  struct cgraph_edge *edge;

		  /* We're cloning or inlining this body; duplicate the
		     associated callgraph edges.  */
		  if (!id->versioning_p)
		    {
		      edge = cgraph_edge (id->current_node, orig_stmt);
		      if (edge)
			cgraph_clone_edge (edge, id->node, stmt,
					   REG_BR_PROB_BASE, 1, true);
		    }
		}
	      if (id->versioning_p)
		{
		  /* Update the call_expr on the edges from the new version
		     to its callees.  */
		  struct cgraph_edge *edge;
		  edge = cgraph_edge (id->node, orig_stmt);
		  if (edge)
		    edge->call_stmt = stmt;
		}
	    }
	  /* If you think we can abort here, you are wrong.
	     There is no region 0 in tree land.  */
	  gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
		      != 0);

	  if (tree_could_throw_p (stmt))
	    {
	      int region = lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt);
	      /* Add an entry for the copied tree in the EH hashtable.
		 When saving or cloning or versioning, use the hashtable in
		 cfun, and just copy the EH number.  When inlining, use the
		 hashtable in the caller, and adjust the region number.  */
	      if (region > 0)
		add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

	      /* If this tree doesn't have a region associated with it,
		 and there is a "current region,"
		 then associate this tree with the current region
		 and add edges associated with this region.  */
	      if ((lookup_stmt_eh_region_fn (id->callee_cfun,
					     orig_stmt) <= 0
		   && id->eh_region > 0)
		  && tree_could_throw_p (stmt))
		add_stmt_to_eh_region (stmt, id->eh_region);
	    }
	}
    }
  return copy_basic_block;
}

/* Copy edges from BB into its copy constructed earlier, scaling the
   profile accordingly.  The aux pointers of the original blocks are
   assumed to point to their copies.  */
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new;

	flags = old_edge->flags;

	/* Return edges do get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
	  flags |= EDGE_FALLTHRU;
	new = make_edge (new_bb, old_edge->dest->aux, flags);
	new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
	new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
	{
	  if (!bsi_end_p (bsi))
	    /* Note that bb's predecessor edges aren't necessarily
	       right at this point; split_block doesn't care.  */
	    {
	      edge e = split_block (new_bb, copy_stmt);
	      new_bb = e->dest;
	      bsi = bsi_start (new_bb);
	    }

	  make_eh_edges (copy_stmt);
	}
    }
}

/* Wrapper for remap_decl so it can be used as a callback.  */
static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, data);
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (inline_data * id, gcov_type count, int frequency,
	       basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->callee;
  /* Original cfun for the callee, doesn't change.  */
  struct function *callee_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  /* Copy, built by this function.  */
  struct function *new_cfun;
  /* Place to copy from; when a copy of the function was saved off earlier,
     use that instead of the main copy.  */
  struct function *cfun_to_copy =
    (struct function *) ggc_alloc_cleared (sizeof (struct function));
  basic_block bb;
  tree new_fndecl = NULL;
  bool saving_or_cloning;
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
		       /
		       ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency);
  else
    frequency_scale = count_scale;
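
  /* E.g. (a sketch) with REG_BR_PROB_BASE == 10000: if the call site
     executes half as often as the callee's entry block, count_scale
     is 5000, so each copied block receives bb->count * 5000 / 10000,
     i.e. half its original count.  */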

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);

  /* If there is a saved_cfg+saved_args lurking in the
     struct function, a copy of the callee body was saved there, and
     the 'struct cgraph edge' nodes have been fudged to point into the
     saved body.  Accordingly, we want to copy that saved body so the
     callgraph edges will be recognized and cloned properly.  */
  if (cfun_to_copy->saved_cfg)
    {
      cfun_to_copy->cfg = cfun_to_copy->saved_cfg;
      cfun_to_copy->eh = cfun_to_copy->saved_eh;
    }
  id->callee_cfun = cfun_to_copy;

  /* If saving or cloning a function body, create new basic_block_info
     and label_to_block_maps.  Otherwise, we're duplicating a function
     body for inlining; insert our new blocks and labels into the
     existing varrays.  */
  saving_or_cloning = (id->saving_p || id->cloning_p || id->versioning_p);
  if (saving_or_cloning)
    {
      new_cfun =
	(struct function *) ggc_alloc_cleared (sizeof (struct function));
      *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
      new_cfun->cfg = NULL;
      new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
      new_cfun->ib_boundaries_block = (varray_type) 0;
      DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
      push_cfun (new_cfun);
      init_empty_tree_cfg ();

      ENTRY_BLOCK_PTR->count =
	(ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
	 REG_BR_PROB_BASE);
      ENTRY_BLOCK_PTR->frequency =
	(ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
	 frequency_scale / REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->count =
	(EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
	 REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->frequency =
	(EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
	 frequency_scale / REG_BR_PROB_BASE);

      entry_block_map = ENTRY_BLOCK_PTR;
      exit_block_map = EXIT_BLOCK_PTR;
    }

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      if (saving_or_cloning)
        init_eh_for_function ();
      id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
						   remap_decl_1,
						   id, id->eh_region);
      gcc_assert (inlining_p (id) || !id->eh_region_offset);
    }
  /* Use aux pointers to map the original blocks to their copies.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    bb->aux = NULL;

  if (saving_or_cloning)
    pop_cfun ();

  return new_fndecl;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (inline_data *id)
{
  tree body;
  tree fndecl = id->callee;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

static tree
copy_body (inline_data *id, gcov_type count, int frequency,
	   basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->callee;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}
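
/* E.g. (a sketch): if f contains the recursive call f (&v), where v
   is a local of f, the argument &v names the *caller's* instance of
   v; it must not be propagated as an invariant into the inlined copy,
   where v is remapped to a different variable.  */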

/* Initialize parameter P of the inlined function with VALUE; emit any
   needed initialization statement at the end of BB, and chain newly
   created VAR_DECLs onto *VARS.  */

static void
setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;

  /* If the parameter is never assigned to, we may not need to
     create a new variable here at all.  Instead, we may be able
     to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value))
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when the operand is not really constant.
	 It is no big deal to prohibit constant propagation here, as
	 we will constant-propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  return;
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_for_dup (p, fn, id->caller, /*versioning=*/false);

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make the gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      tree rhs = fold_convert (TREE_TYPE (var), value);
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
	return;

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
	 keep our trees in gimple form.  */
      init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
	 cast of a gimple value, then we will need to gimplify INIT_STMTS
	 at the end.  Note that is_gimple_cast only checks the outer
	 tree code, not its operand.  Thus the explicit check that its
	 operand is a gimple value.  */
      if (!is_gimple_val (rhs)
	  && (!is_gimple_cast (rhs)
	      || !is_gimple_val (TREE_OPERAND (rhs, 0))))
	gimplify_stmt (&init_stmt);

      /* If VAR represents a zero-sized variable, it's possible that the
	 assignment statement may result in no gimple statements.  */
      if (init_stmt)
        bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
    }
}
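
/* For example (a sketch): inlining g (int p) at the call g (x + 1)
   creates a new VAR_DECL for p, maps the PARM_DECL to it in
   id->decl_map, and appends 'p.1 = x + 1' to BB (p.1 being a
   hypothetical name for the copy); whereas for a read-only parameter
   at the call g (3), the PARM_DECL may simply be mapped to the
   constant 3 and no variable is created at all.  */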

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
			       tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);
  if (fn == current_function_decl)
    parms = cfun->saved_args;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
	      (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (fn == current_function_decl)
    p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}

/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.

   RETURN_SLOT_ADDR, if non-null, was a fake parameter that
   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
   the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (inline_data *id, tree return_slot_addr,
			 tree modify_dest, tree *use_p)
{
  tree callee = id->callee;
  tree caller = id->caller;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot_addr)
    {
      /* The front end shouldn't have used both return_slot_addr and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	var = return_slot_addr;
      else
	var = build_fold_indirect_ref (return_slot_addr);
      if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
	  && !DECL_COMPLEX_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_COMPLEX_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
		   && !DECL_COMPLEX_GIMPLE_REG_P (result)
		   && DECL_COMPLEX_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_decl_for_dup (result, callee, caller, /*versioning=*/false);

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
		 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}
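
/* For example (a sketch): when inlining 's = f ();' where f returns a
   struct, MODIFY_DEST is 's'; if 's' is an ordinary non-addressable
   local, it is reused directly as the return variable and *USE_P is
   left NULL, so no temporary is created.  */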
1368 
1369 /* Returns nonzero if a function can be inlined as a tree.  */
1370 
1371 bool
tree_inlinable_function_p(tree fn)1372 tree_inlinable_function_p (tree fn)
1373 {
1374   return inlinable_function_p (fn);
1375 }
1376 
1377 static const char *inline_forbidden_reason;
1378 
1379 static tree
inline_forbidden_p_1(tree * nodep,int * walk_subtrees ATTRIBUTE_UNUSED,void * fnp)1380 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
1381 		      void *fnp)
1382 {
1383   tree node = *nodep;
1384   tree fn = (tree) fnp;
1385   tree t;
1386 
1387   switch (TREE_CODE (node))
1388     {
1389     case CALL_EXPR:
1390       /* Refuse to inline alloca call unless user explicitly forced so as
1391 	 this may change program's memory overhead drastically when the
1392 	 function using alloca is called in loop.  In GCC present in
1393 	 SPEC2000 inlining into schedule_block cause it to require 2GB of
1394 	 RAM instead of 256MB.  */
1395       if (alloca_call_p (node)
1396 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1397 	{
1398 	  inline_forbidden_reason
1399 	    = G_("function %q+F can never be inlined because it uses "
1400 		 "alloca (override using the always_inline attribute)");
1401 	  return node;
1402 	}
1403       t = get_callee_fndecl (node);
1404       if (! t)
1405 	break;
1406 
1407       /* We cannot inline functions that call setjmp.  */
1408       if (setjmp_call_p (t))
1409 	{
1410 	  inline_forbidden_reason
1411 	    = G_("function %q+F can never be inlined because it uses setjmp");
1412 	  return node;
1413 	}
1414 
1415       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1416 	switch (DECL_FUNCTION_CODE (t))
1417 	  {
1418 	    /* We cannot inline functions that take a variable number of
1419 	       arguments.  */
1420 	  case BUILT_IN_VA_START:
1421 	  case BUILT_IN_STDARG_START:
1422 	  case BUILT_IN_NEXT_ARG:
1423 	  case BUILT_IN_VA_END:
1424 	    inline_forbidden_reason
1425 	      = G_("function %q+F can never be inlined because it "
1426 		   "uses variable argument lists");
1427 	    return node;
1428 
1429 	  case BUILT_IN_LONGJMP:
1430 	    /* We can't inline functions that call __builtin_longjmp at
1431 	       all.  The non-local goto machinery really requires the
1432 	       destination be in a different function.  If we allow the
1433 	       function calling __builtin_longjmp to be inlined into the
1434 	       function calling __builtin_setjmp, Things will Go Awry.  */
1435 	    inline_forbidden_reason
1436 	      = G_("function %q+F can never be inlined because "
1437 		   "it uses setjmp-longjmp exception handling");
1438 	    return node;
1439 
1440 	  case BUILT_IN_NONLOCAL_GOTO:
1441 	    /* Similarly.  */
1442 	    inline_forbidden_reason
1443 	      = G_("function %q+F can never be inlined because "
1444 		   "it uses non-local goto");
1445 	    return node;
1446 
1447 	  case BUILT_IN_RETURN:
1448 	  case BUILT_IN_APPLY_ARGS:
1449 	    /* If a __builtin_apply_args caller would be inlined,
1450 	       it would be saving arguments of the function it has
1451 	       been inlined into.  Similarly __builtin_return would
1452 	       return from the function the inline has been inlined into.  */
1453 	    inline_forbidden_reason
1454 	      = G_("function %q+F can never be inlined because "
1455 		   "it uses __builtin_return or __builtin_apply_args");
1456 	    return node;
1457 
1458 	  default:
1459 	    break;
1460 	  }
1461       break;
1462 
1463     case GOTO_EXPR:
1464       t = TREE_OPERAND (node, 0);
1465 
1466       /* We will not inline a function which uses computed goto.  The
1467 	 addresses of its local labels, which may be tucked into
1468 	 global storage, are of course not constant across
1469 	 instantiations, which causes unexpected behavior.  */
1470       if (TREE_CODE (t) != LABEL_DECL)
1471 	{
1472 	  inline_forbidden_reason
1473 	    = G_("function %q+F can never be inlined "
1474 		 "because it contains a computed goto");
1475 	  return node;
1476 	}
1477       break;
1478 
1479     case LABEL_EXPR:
1480       t = TREE_OPERAND (node, 0);
1481       if (DECL_NONLOCAL (t))
1482 	{
1483 	  /* We cannot inline a function that receives a non-local goto
1484 	     because we cannot remap the destination label used in the
1485 	     function that is performing the non-local goto.  */
1486 	  inline_forbidden_reason
1487 	    = G_("function %q+F can never be inlined "
1488 		 "because it receives a non-local goto");
1489 	  return node;
1490 	}
1491       break;
1492 
1493     case RECORD_TYPE:
1494     case UNION_TYPE:
1495       /* We cannot inline a function of the form
1496 
1497 	   void F (int i) { struct S { int ar[i]; } s; }
1498 
1499 	 Attempting to do so produces a catch-22.
1500 	 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1501 	 UNION_TYPE nodes, then it goes into infinite recursion on a
1502 	 structure containing a pointer to its own type.  If it doesn't,
1503 	 then the type node for S doesn't get adjusted properly when
1504 	 F is inlined.
1505 
1506 	 ??? This is likely no longer true, but it's too late in the 4.0
1507 	 cycle to try to find out.  This should be checked for 4.1.  */
1508       for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1509 	if (variably_modified_type_p (TREE_TYPE (t), NULL))
1510 	  {
1511 	    inline_forbidden_reason
1512 	      = G_("function %q+F can never be inlined "
1513 		   "because it uses variable sized variables");
1514 	    return node;
1515 	  }
1516 
1517     default:
1518       break;
1519     }
1520 
1521   return NULL_TREE;
1522 }
1523 
1524 /* Return subexpression representing possible alloca call, if any.  */
1525 static tree
inline_forbidden_p(tree fndecl)1526 inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
				    inline_forbidden_p_1, fndecl);
	if (ret)
	  goto egress;
      }

egress:
  input_location = saved_loc;
  return ret;
}

/* Return true if FN is a function that does not have any fundamental
   inline-blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
	    here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would, for example, call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.
	 We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
			 && DECL_INLINE (fn)
			 && DECL_DECLARED_INLINE_P (fn)
			 && !DECL_IN_SYSTEM_HEADER (fn));

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	sorry (inline_forbidden_reason, fn);
      else if (do_warning)
	warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}

/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
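
/* For example, on a hypothetical target with MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 4, a 16-byte structure is costed at
   (16 + 8 - 1) / 8 == 2, while a 64-byte structure exceeds
   8 * 4 == 32 bytes and is assumed to become a memcpy call, i.e.
   cost 4.  Both macros are target-dependent; the numbers above are
   purely illustrative.  */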

/* Used by estimate_num_insns.  Estimate number of instructions seen
   by given statement.  */

static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
  int *count = data;
  tree x = *tp;

  if (IS_TYPE_OR_DECL_P (x))
    {
      *walk_subtrees = 0;
      return NULL;
    }
  /* Assume that constants and references cost nothing.  Their cost
     should be dominated by the operations that use them, which we do
     count, and they are a common target of CSE and similar
     optimizations.  */
  else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
    return NULL;

  switch (TREE_CODE (x))
    {
    /* Containers have no cost.  */
    case TREE_LIST:
    case TREE_VEC:
    case BLOCK:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case OBJ_TYPE_REF:
    case EXC_PTR_EXPR: /* ??? */
    case FILTER_EXPR: /* ??? */
    case COMPOUND_EXPR:
    case BIND_EXPR:
    case WITH_CLEANUP_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
    case SAVE_EXPR:
    case ADDR_EXPR:
    case COMPLEX_EXPR:
    case RANGE_EXPR:
    case CASE_LABEL_EXPR:
    case SSA_NAME:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case STATEMENT_LIST:
    case ERROR_MARK:
    case NON_LVALUE_EXPR:
    case FDESC_EXPR:
    case VA_ARG_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case EXIT_EXPR:
    case LOOP_EXPR:
    case PHI_NODE:
    case WITH_SIZE_EXPR:
      break;

    /* We don't account for constants for now.  Assume that their cost
       is amortized by the operations that use them.  We may reconsider
       this decision once we are able to optimize the tree before
       estimating its size and break out static initializers.  */
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      *walk_subtrees = 0;
      return NULL;

    /* Try to estimate the cost of assignments.  We have three cases to
       deal with:
	1) Simple assignments to registers;
	2) Stores to things that must live in memory.  This includes
	   "normal" stores to scalars, but also assignments of large
	   structures, or constructors of big arrays;
	3) TARGET_EXPRs.

       Let us look at the first two cases, assuming we have "a = b + C":
       <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
       If "a" is a GIMPLE register, the assignment to it is free on almost
       any target, because "a" usually ends up in a real register.  Hence
       the only cost of this expression comes from the PLUS_EXPR, and we
       can ignore the MODIFY_EXPR.
       If "a" is not a GIMPLE register, the assignment to "a" will most
       likely be a real store, so the cost of the MODIFY_EXPR is the cost
       of moving something into "a", which we compute using the function
       estimate_move_cost.

       The third case deals with TARGET_EXPRs, for which the semantics are
       that a temporary is assigned, unless the TARGET_EXPR itself is being
       assigned to something else.  In the latter case we do not need the
       temporary.  E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
       MODIFY_EXPR is free.  */
    case INIT_EXPR:
    case MODIFY_EXPR:
      /* Is the right-hand side a TARGET_EXPR?  */
      if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
	break;
      /* ... fall through ...  */

    case TARGET_EXPR:
      x = TREE_OPERAND (x, 0);
      /* Is this an assignment to a register?  */
      if (is_gimple_reg (x))
	break;
      /* Otherwise it's a store, so fall through to compute the move cost.  */

    case CONSTRUCTOR:
      *count += estimate_move_cost (TREE_TYPE (x));
      break;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONVERT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case SWITCH_EXPR:

    case ASM_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:

    case RESX_EXPR:
      *count += 1;
      break;

    /* A few special cases of expensive operations.  This is useful to
       avoid inlining functions that contain too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      *count += 10;
      break;
    case CALL_EXPR:
      {
	tree decl = get_callee_fndecl (x);
	tree arg;

	if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (decl))
	    {
	    case BUILT_IN_CONSTANT_P:
	      *walk_subtrees = 0;
	      return NULL_TREE;
	    case BUILT_IN_EXPECT:
	      return NULL_TREE;
	    default:
	      break;
	    }

	/* Our cost must be kept in sync with
	   cgraph_estimate_size_after_inlining, which uses the function
	   declaration to figure out the arguments.  */
	if (!decl)
	  {
	    for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
	      *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
	  }
	else
	  {
	    for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	      *count += estimate_move_cost (TREE_TYPE (arg));
	  }

	*count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
	break;
      }
    default:
      gcc_unreachable ();
    }
  return NULL;
}

/* Estimate number of instructions that will be created by expanding EXPR.  */

int
estimate_num_insns (tree expr)
{
  int num = 0;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  block_stmt_iterator bsi;
  struct function *my_function;

  /* If we're given an entire function, walk the CFG.  */
  if (TREE_CODE (expr) == FUNCTION_DECL)
    {
      my_function = DECL_STRUCT_FUNCTION (expr);
      gcc_assert (my_function && my_function->cfg);
      visited_nodes = pointer_set_create ();
      FOR_EACH_BB_FN (bb, my_function)
	{
	  for (bsi = bsi_start (bb);
	       !bsi_end_p (bsi);
	       bsi_next (&bsi))
	    {
	      walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
			 &num, visited_nodes);
	    }
	}
      pointer_set_destroy (visited_nodes);
    }
  else
    walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);

  return num;
}
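
/* For example, a caller can cost an entire function with
   estimate_num_insns (fndecl), which walks the CFG with a visited-node
   set so that shared subtrees are counted only once, or cost a single
   statement by passing the statement tree directly.  */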

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);

/* Initialized with NOGC, making this poisonous to the garbage collector.  */
static VEC(function_p,heap) *cfun_stack;

void
push_cfun (struct function *new_cfun)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  cfun = new_cfun;
}

void
pop_cfun (void)
{
  cfun = VEC_pop (function_p, cfun_stack);
}
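
/* Typical usage is a save/restore pair around work done in another
   function's context (a sketch):

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... inspect or modify FNDECL's CFG through cfun ...
     pop_cfun ();  */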

/* Install new lexical TREE_BLOCK underneath 'current_block'.  */
static void
add_lexical_block (tree current_block, tree new_block)
{
  tree *blk_p;

  /* Walk to the last sub-block.  */
  for (blk_p = &BLOCK_SUBBLOCKS (current_block);
       *blk_p;
       blk_p = &TREE_CHAIN (*blk_p))
    ;
  *blk_p = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}
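
/* For example, if CURRENT_BLOCK already has sub-blocks B1 -> B2, the
   chain becomes B1 -> B2 -> NEW_BLOCK after the call, and
   BLOCK_SUPERCONTEXT (NEW_BLOCK) points back at CURRENT_BLOCK.  */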

/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
{
  inline_data *id;
  tree t;
  tree use_retvar;
  tree fn;
  splay_tree st;
  tree args;
  tree return_slot_addr;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *cg_edge;
  const char *reason;
  basic_block return_block;
  edge e;
  block_stmt_iterator bsi, stmt_bsi;
  bool successfully_inlined = FALSE;
  tree t_step;
  tree var;
  struct cgraph_node *old_node;
  tree decl;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    goto egress;

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If fn is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     DECL_SAVED_TREE.  */
  if (! DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective-C and Fortran still call tree_rest_of_compilation directly.
     Remove this check once that is fixed.  */
  if (!id->current_node->analyzed)
    goto egress;

  cg_edge = cgraph_edge (id->current_node, stmt);

  /* Constant propagation on arguments done during previous inlining
     may create a new direct call.  Produce an edge for it.  */
  if (!cg_edge)
    {
      struct cgraph_node *dest = cgraph_node (fn);

      /* We have a missing edge in the callgraph.  This can happen when
	 previous inlining turned an indirect call into a direct call by
	 constant propagating arguments.  In all other cases we hit a bug
	 (incorrect node sharing is the most common reason for missing
	 edges).  */
      gcc_assert (dest->needed || !flag_unit_at_a_time);
      cgraph_create_edge (id->node, dest, stmt,
			  bb->count, bb->loop_depth)->inline_failed
	= N_("originally indirect function call not considered for inlining");
      goto egress;
    }

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (cg_edge, &reason))
    {
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
	  /* Avoid warnings during early inline pass. */
	  && (!flag_unit_at_a_time || cgraph_global_info_ready))
	{
	  sorry ("inlining failed in call to %q+F: %s", fn, reason);
	  sorry ("called from here");
	}
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
	       && !DECL_IN_SYSTEM_HEADER (fn)
	       && strlen (reason)
	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
	       /* Avoid warnings during early inline pass. */
	       && (!flag_unit_at_a_time || cgraph_global_info_ready))
	{
	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
		   fn, reason);
	  warning (OPT_Winline, "called from here");
	}
      goto egress;
    }

#ifdef ENABLE_CHECKING
  if (cg_edge->callee->decl != id->node->decl)
    verify_cgraph_node (cg_edge->callee);
#endif

  /* (TIGCC 20040926) The following code by Eric Botcazou fixes an ICE when
     inlining tries to change the mode of parameters or the return value. Eric
     Botcazou's comments explain the details.  -- Kevin Kofler  */
  /* We can't inline functions at a calling point where they are viewed
     with too different a prototype than the actual one, because the
     calling convention may not be the same on both sides.  */
  if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR)
    {
      tree from_ftype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
      tree to_ftype = TREE_TYPE (fn);

      if (from_ftype != to_ftype)
	{
	  tree from_arg, to_arg;

	  /* If the calling point expects a return value and it is too
	     different from the one actually returned, don't inline.  */
	  if (! VOID_TYPE_P (TREE_TYPE (from_ftype))
	      && TYPE_MODE (TREE_TYPE (from_ftype))
		 != TYPE_MODE (TREE_TYPE (to_ftype)))
	    goto egress;

	  /* If the calling point doesn't pass at least the correct
	     number of arguments with the correct modes, don't inline.
	     Objective-C appears to add a trailing void parameter at
	     the calling point under certain circumstances.  */
	  from_arg = TYPE_ARG_TYPES (from_ftype);
	  to_arg = TYPE_ARG_TYPES (to_ftype);

	  while (to_arg)
	    {
	      if (! from_arg
		  || TYPE_MODE (TREE_VALUE (from_arg))
		     != TYPE_MODE (TREE_VALUE (to_arg)))
		goto egress;

	      from_arg = TREE_CHAIN (from_arg);
	      to_arg = TREE_CHAIN (to_arg);
	    }
	}
    }

  /* We will be inlining this callee.  */

  id->eh_region = lookup_stmt_eh_region (stmt);

  /* Split the block holding the CALL_EXPR.  */

  e = split_block (bb, stmt);
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* split_block splits after the statement; work around this by moving
     the call into the second half of the block.  Not pretty, but it
     seems easier than doing the CFG manipulation by hand when the
     CALL_EXPR is the last statement in BB.  */
  stmt_bsi = bsi_last (bb);
  bsi = bsi_start (return_block);
  if (!bsi_end_p (bsi))
    bsi_move_before (&stmt_bsi, &bsi);
  else
    {
      tree stmt = bsi_stmt (stmt_bsi);
      bsi_remove (&stmt_bsi);
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
    }
  stmt_bsi = bsi_start (return_block);
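
  /* Schematically (a sketch): if BB was [s1; s2; call], we now have
     BB == [s1; s2] and RETURN_BLOCK == [call; ...], with the edge
     between them removed so that the inlined body can be wired in
     between the two halves.  */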

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  id->block = make_node (BLOCK);
  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
  BLOCK_SOURCE_LOCATION (id->block) = input_location;
  add_lexical_block (TREE_BLOCK (stmt), id->block);

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
				 NULL, NULL);

  /* Initialize the parameters.  */
  args = TREE_OPERAND (t, 1);

  initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);

  /* Record the function we are about to inline.  */
  id->callee = fn;

  if (DECL_STRUCT_FUNCTION (fn)->saved_blocks)
    add_lexical_block (id->block, remap_blocks (DECL_STRUCT_FUNCTION (fn)->saved_blocks, id));
  else if (DECL_INITIAL (fn))
    add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */

  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the lhs to which the result of this call is assigned.  */
  return_slot_addr = NULL;
  if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      modify_dest = TREE_OPERAND (stmt, 0);

      /* The function which we are inlining might not return a value,
	 in which case we should issue a warning that the function
	 does not return a value.  In that case the optimizers will
	 see that the variable to which the value is assigned was not
	 initialized.  We do not want to issue a warning about that
	 uninitialized variable.  */
      if (DECL_P (modify_dest))
	TREE_NO_WARNING (modify_dest) = 1;
      if (CALL_EXPR_RETURN_SLOT_OPT (t))
	{
	  return_slot_addr = build_fold_addr_expr (modify_dest);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
	  modify_dest = NULL;
	}
    }
  else
    modify_dest = NULL;

  /* Declare the return variable for the function.  */
  decl = declare_return_variable (id, return_slot_addr,
			          modify_dest, &use_retvar);
  /* Do this only if declare_return_variable created a new one.  */
  if (decl && !return_slot_addr && decl != modify_dest)
    declare_inline_vars (id->block, decl);

  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  old_node = id->current_node;

  /* Anoint the callee-to-be-duplicated as the "current_node."  When
     CALL_EXPRs within callee are duplicated, the edges from callee to
     callee's callees (caller's grandchildren) will be cloned.  */
  id->current_node = cg_edge->callee;

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb->count, bb->frequency, bb, return_block);
  id->current_node = old_node;

  /* Add local vars in this inlined callee to caller.  */
  t_step = id->callee_cfun->unexpanded_var_list;
  if (id->callee_cfun->saved_unexpanded_var_list)
    t_step = id->callee_cfun->saved_unexpanded_var_list;
  for (; t_step; t_step = TREE_CHAIN (t_step))
    {
      var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
	cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
					       cfun->unexpanded_var_list);
      else
	cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
					       cfun->unexpanded_var_list);
    }

  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* If the inlined function returns a result that we care about,
     clobber the CALL_EXPR with a reference to the return variable.  */
  if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
    {
      *tp = use_retvar;
      maybe_clean_or_replace_eh_stmt (stmt, stmt);
    }
  else
    /* We're modifying a BSI owned by gimple_expand_calls_inline ();
       bsi_remove () will leave the iterator in a sane state.  */
    bsi_remove (&stmt_bsi);

  bsi_next (&bsi);
  if (bsi_end_p (bsi))
    tree_purge_dead_eh_edges (return_block);

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (cg_edge->callee);

  /* Declare the 'auto' variables added with this inlined body.  */
  record_vars (BLOCK_VARS (id->block));
  id->block = NULL_TREE;
  successfully_inlined = TRUE;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}

/* Expand call statements reachable from STMT_P.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We can
   unfortunately not use that function here because we need a pointer
   to the CALL_EXPR, not the tree itself.  */
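
/* Concretely, the statement shapes we look through are (a sketch):

     CALL_EXPR <...>;
     lhs = CALL_EXPR <...>;
     lhs = WITH_SIZE_EXPR <CALL_EXPR <...>, size>;

   and we hand the address of the innermost CALL_EXPR to
   expand_call_inline.  */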

static bool
gimple_expand_calls_inline (basic_block bb, inline_data *id)
{
  block_stmt_iterator bsi;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree *expr_p = bsi_stmt_ptr (bsi);
      tree stmt = *expr_p;

      if (TREE_CODE (*expr_p) == MODIFY_EXPR)
	expr_p = &TREE_OPERAND (*expr_p, 1);
      if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
	expr_p = &TREE_OPERAND (*expr_p, 0);
      if (TREE_CODE (*expr_p) == CALL_EXPR)
	if (expand_call_inline (bb, stmt, expr_p, id))
	  return true;
    }
  return false;
}

/* Expand calls to inline functions in the body of FN.  */

void
optimize_inline_calls (tree fn)
{
  inline_data id;
  tree prev_fn;
  basic_block bb;
  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.current_node = id.node = cgraph_node (fn);
  id.caller = fn;
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      id.caller = current_function_decl;
      prev_fn = current_function_decl;
    }
  push_gimplify_context ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB (bb)
    gimple_expand_calls_inline (bb, &id);


  pop_gimplify_context (NULL);
  /* Renumber the (code) basic_blocks consecutively.  */
  compact_blocks ();
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

#ifdef ENABLE_CHECKING
    {
      struct cgraph_edge *e;

      verify_cgraph_node (id.node);

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.node->callees; e; e = e->next_callee)
	gcc_assert (e->inline_failed);
    }
#endif
  /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
     as inlining loops might increase the maximum.  */
  if (ENTRY_BLOCK_PTR->count)
    counts_to_freqs ();
  fold_cond_expr_cond ();
}

/* FN is a function that has a complete body, and CLONE is a function whose
   body is to be set to a copy of FN, mapping argument declarations according
   to the ARG_MAP splay_tree.  */

void
clone_body (tree clone, tree fn, void *arg_map)
{
  inline_data id;

  /* Clone the body, as if we were making an inline call.  But, remap the
     parameters in the callee to the parameters of caller.  */
  memset (&id, 0, sizeof (id));
  id.caller = clone;
  id.callee = fn;
  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = (splay_tree)arg_map;

  /* Cloning is treated slightly differently from inlining.  Set
     CLONING_P so that it's clear which operation we're performing.  */
  id.cloning_p = true;

  /* We're not inside any EH region.  */
  id.eh_region = -1;

  /* Actually copy the body.  */
  append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
}
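
/* A sketch of the intended use: the C++ front end builds a splay tree
   mapping each PARM_DECL of FN to the corresponding parameter (or
   constant) for CLONE and then calls

     clone_body (clone, fn, decl_map);

   after which DECL_SAVED_TREE (clone) holds a remapped copy of FN's
   body.  (The decl_map name is illustrative.)  */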

/* Make and return duplicate of body in FN.  Put copies of DECL_ARGUMENTS
   in *arg_copy and of the static chain, if any, in *sc_copy.  */

void
save_body (tree fn, tree *arg_copy, tree *sc_copy)
{
  inline_data id;
  tree newdecl, *parg;
  basic_block fn_entry_block;
  tree t_step;

  memset (&id, 0, sizeof (id));
  id.callee = fn;
  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
  id.caller = fn;
  id.node = cgraph_node (fn);
  id.saving_p = true;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  *arg_copy = DECL_ARGUMENTS (fn);

  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
    {
      tree new = copy_node (*parg);

      lang_hooks.dup_lang_specific_decl (new);
      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
      insert_decl_map (&id, *parg, new);
      TREE_CHAIN (new) = TREE_CHAIN (*parg);
      *parg = new;
    }

  *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (*sc_copy)
    {
      tree new = copy_node (*sc_copy);

      lang_hooks.dup_lang_specific_decl (new);
      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
      insert_decl_map (&id, *sc_copy, new);
      TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
      *sc_copy = new;
    }

  /* We're not inside any EH region.  */
  id.eh_region = -1;

  insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));

  DECL_STRUCT_FUNCTION (fn)->saved_blocks
    = remap_blocks (DECL_INITIAL (fn), &id);
  for (t_step = id.callee_cfun->unexpanded_var_list;
       t_step;
       t_step = TREE_CHAIN (t_step))
    {
      tree var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
	cfun->saved_unexpanded_var_list
	  = tree_cons (NULL_TREE, var, cfun->saved_unexpanded_var_list);
      else
	cfun->saved_unexpanded_var_list
	  = tree_cons (NULL_TREE, remap_decl (var, &id),
		       cfun->saved_unexpanded_var_list);
    }

  /* Actually copy the body, including a new (struct function *) and CFG.
     EH info is also duplicated so its labels point into the copied
     CFG, not the original.  */
  fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
  newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency,
		       NULL, NULL);
  DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
  DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;

  /* Clean up.  */
  splay_tree_delete (id.decl_map);
}

/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  inline_data *id = (inline_data *) data;

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
	 here.  */
      tree chain = TREE_CHAIN (*tp);
      tree new;

      if (id && id->versioning_p && replace_ref_tree (id, tp))
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
      /* Copy the node.  */
      new = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new);

      *tp = new;

      /* Now, restore the chain, if appropriate.  That will cause
	 walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL || code == TREE_LIST)
	TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
	 have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
         we need to duplicate the vector of elements.  */
      tree new;

      new = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new);

      CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
					 CONSTRUCTOR_ELTS (*tp));
      *tp = new;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  else
    gcc_assert (code != STATEMENT_LIST);
  return NULL_TREE;
}

/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be
   mapped, use that one.  Otherwise, create a new node and enter it
   in ST.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  splay_tree st = (splay_tree) st_;
  splay_tree_node n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = splay_tree_lookup (st, (splay_tree_key) *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = (tree) n->value;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}

/* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
   copies the label's declaration and enters it in the splay tree in DATA
   (which is really an `inline_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl,
		       copy_decl_for_dup (decl, DECL_CONTEXT (decl),
					  DECL_CONTEXT (decl),  /*versioning=*/false));
    }

  return NULL_TREE;
}

/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
	break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}

/* Called via walk_tree when an expression is unsaved.  Using the
   decl_map splay tree in the inline_data pointed to by DATA, remaps
   all local declarations to appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  splay_tree st = id->decl_map;
  splay_tree_node n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = splay_tree_lookup (st, (splay_tree_key) *tp);

      /* If it's there, remap it.  */
      if (n)
	*tp = (tree) n->value;
    }

  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  inline_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.callee = current_function_decl;
  id.caller = current_function_decl;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);

  return expr;
}
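
/* For instance (a sketch), a front end that must evaluate EXPR twice
   can expand unsave_expr_now (expr) the second time; the copy shares
   no local variables, labels or SAVE_EXPRs with the original, so the
   two expansions do not interfere.  */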

/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}


/* Declare the variables created by the inliner.  Add all the variables
   in VARS to BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = TREE_CHAIN (t))
    DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}


/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  VERSIONING means that this function
   is used by the versioning utility (not inlining or cloning).  */

tree
copy_decl_for_dup (tree decl, tree from_fn, tree to_fn, bool versioning)
{
  tree copy;

  gcc_assert (DECL_P (decl));
  /* Copy the declaration.  */
  if (!versioning
      && (TREE_CODE (decl) == PARM_DECL
	  || TREE_CODE (decl) == RESULT_DECL))
    {
      tree type = TREE_TYPE (decl);

      /* For a parameter or result, we must make an equivalent VAR_DECL,
	 not a new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
      DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	{
	  TREE_ADDRESSABLE (copy) = 0;
	  LABEL_DECL_UID (copy) = -1;
	}
    }

  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
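
/* For example, when inlining "int foo (int n)", the callee's PARM_DECL
   N comes back as a fresh VAR_DECL with the same name and type in the
   caller, whereas a function-scoped static hits the TREE_STATIC arm
   above and keeps its original DECL_CONTEXT.  */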

/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, inline_data * id)
{
  tree *arg_copy, *parg;

  arg_copy = &orig_parm;
  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
    {
      tree new = remap_decl (*parg, id);
      lang_hooks.dup_lang_specific_decl (new);
      TREE_CHAIN (new) = TREE_CHAIN (*parg);
      *parg = new;
    }
  return orig_parm;
}

/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, inline_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
    {
      tree new = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new);
      TREE_CHAIN (new) = TREE_CHAIN (*pvar);
      *pvar = new;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */
bool
tree_versionable_function_p (tree fndecl)
{
  if (fndecl == NULL_TREE)
    return false;
  /* ??? There are cases where a function is
     uninlinable but can be versioned.  */
  if (!tree_inlinable_function_p (fndecl))
    return false;

  return true;
}

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  */
void
tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  inline_data id;
  tree p, new_fndecl;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block;
  tree t_step;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
	      && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node (old_decl);
  new_version_node = cgraph_node (new_decl);

  allocate_struct_function (new_decl);
  /* CFUN points to the newly allocated function struct at this point.  */
  cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) =
    create_tmp_var_name (NULL);
  /* Create a new SYMBOL_REF rtx for the new name.  */
  if (DECL_RTL (old_decl) != NULL)
    {
      SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
      XEXP (DECL_RTL (new_decl), 0) =
	gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
			    IDENTIFIER_POINTER (DECL_NAME (new_decl)));
    }

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* The new version.  */
  id.node = new_version_node;

  /* The old version.  */
  id.current_node = cgraph_node (old_decl);

  id.versioning_p = true;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  id.caller = new_decl;
  id.callee = old_decl;
  id.callee_cfun = DECL_STRUCT_FUNCTION (old_decl);

  current_function_decl = new_decl;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
			 &id);
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
      {
	replace_info = VARRAY_GENERIC_PTR (tree_map, i);
	if (replace_info->replace_p && !replace_info->ref_p)
	  insert_decl_map (&id, replace_info->old_tree,
			   replace_info->new_tree);
	else if (replace_info->replace_p && replace_info->ref_p)
	  id.ipa_info = tree_map;
      }

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.callee), &id);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (id.caller);

  if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
    /* Add local vars.  */
    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
	 t_step; t_step = TREE_CHAIN (t_step))
      {
	tree var = TREE_VALUE (t_step);
	if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
						 cfun->unexpanded_var_list);
	else
	  cfun->unexpanded_var_list =
	    tree_cons (NULL_TREE, remap_decl (var, &id),
		       cfun->unexpanded_var_list);
      }

  /* Copy the function's body.  */
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  new_fndecl = copy_body (&id,
			  old_entry_block->count,
			  old_entry_block->frequency, NULL, NULL);

  DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);

  DECL_STRUCT_FUNCTION (new_decl)->cfg =
    DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
  DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
  DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
    DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
  DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
    DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;

  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree *res_decl = &DECL_RESULT (old_decl);
      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
    }

  current_function_decl = NULL;
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);
  fold_cond_expr_cond ();
  return;
}

/* Replace an INDIRECT_REF of a given DECL tree with a new given tree.
   ID->ipa_info keeps the old tree and the new tree.
   TP points to the INDIRECT_REF tree.  Return true if
   the trees were replaced.  */
static bool
replace_ref_tree (inline_data * id, tree * tp)
{
  bool replaced = false;
  tree new;

  if (id->ipa_info && VARRAY_ACTIVE_SIZE (id->ipa_info) > 0)
    {
      unsigned i;

      for (i = 0; i < VARRAY_ACTIVE_SIZE (id->ipa_info); i++)
	{
	  struct ipa_replace_map *replace_info;
	  replace_info = VARRAY_GENERIC_PTR (id->ipa_info, i);

	  if (replace_info->replace_p && replace_info->ref_p)
	    {
	      tree old_tree = replace_info->old_tree;
	      tree new_tree = replace_info->new_tree;

	      if (TREE_CODE (*tp) == INDIRECT_REF
		  && TREE_OPERAND (*tp, 0) == old_tree)
		{
		  new = copy_node (new_tree);
		  *tp = new;
		  replaced = true;
		}
	    }
	}
    }
  return replaced;
}
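
/* For example (a sketch): if a replace_info entry has OLD_TREE equal
   to the parameter P and NEW_TREE equal to some tree C, every
   INDIRECT_REF *P in the duplicated body is overwritten with a fresh
   copy of C.  */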

/* Return true if we are inlining.  */
static inline bool
inlining_p (inline_data * id)
{
  return (!id->saving_p && !id->cloning_p && !id->versioning_p);
}

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  inline_data id;

  memset (&id, 0, sizeof (id));
  id.callee = current_function_decl;
  id.caller = current_function_decl;
  id.callee_cfun = cfun;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  type = remap_type_1 (type, &id);

  splay_tree_delete (id.decl_map);

  return type;
}