xref: /dragonfly/contrib/gcc-8.0/gcc/tree-inline.c (revision c87dd536)
1 /* Tree inlining.
2    Copyright (C) 2001-2018 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "tree-chkp.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "sreal.h"
64 
65 /* I'm not real happy about this, but we need to handle gimple and
66    non-gimple trees.  */
67 
68 /* Inlining, Cloning, Versioning, Parallelization
69 
70    Inlining: a function body is duplicated, but the PARM_DECLs are
71    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72    MODIFY_EXPRs that store to a dedicated returned-value variable.
73    The duplicated eh_region info of the copy will later be appended
74    to the info for the caller; the eh_region info in copied throwing
75    statements and RESX statements are adjusted accordingly.
76 
77    Cloning: (only in C++) We have one body for a con/de/structor, and
78    multiple function decls, each with a unique parameter list.
79    Duplicate the body, using the given splay tree; some parameters
80    will become constants (like 0 or 1).
81 
82    Versioning: a function body is duplicated and the result is a new
83    function rather than into blocks of an existing function as with
84    inlining.  Some parameters will become constants.
85 
86    Parallelization: a region of a function is duplicated resulting in
87    a new function.  Variables may be replaced with complex expressions
88    to enable shared variable semantics.
89 
90    All of these will simultaneously lookup any callgraph edges.  If
91    we're going to inline the duplicated function body, and the given
92    function has some cloned callgraph nodes (one for each place this
93    function will be inlined) those callgraph edges will be duplicated.
94    If we're cloning the body, those callgraph edges will be
95    updated to point into the new body.  (Note that the original
96    callgraph node and edge list will not be altered.)
97 
98    See the CALL_EXPR handling case in copy_tree_body_r ().  */
99 
100 /* To Do:
101 
102    o In order to make inlining-on-trees work, we pessimized
103      function-local static constants.  In particular, they are now
104      always output, even when not addressed.  Fix this by treating
105      function-local static constants just like global static
106      constants; the back-end already knows not to output them if they
107      are not needed.
108 
109    o Provide heuristics to clamp inlining of recursive template
110      calls?  */
111 
112 
113 /* Weights that estimate_num_insns uses to estimate the size of the
114    produced code.  */
115 
116 eni_weights eni_size_weights;
117 
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119    to execute the produced code.  */
120 
121 eni_weights eni_time_weights;
122 
123 /* Prototypes.  */
124 
125 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
126 				     basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_decl_to_var (tree, copy_body_data *);
133 static tree copy_result_decl_to_var (tree, copy_body_data *);
134 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
137 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
138 
139 /* Insert a tree->tree mapping for ID.  Despite the name suggests
140    that the trees should be variables, it is used for more than that.  */
141 
142 void
143 insert_decl_map (copy_body_data *id, tree key, tree value)
144 {
145   id->decl_map->put (key, value);
146 
147   /* Always insert an identity map as well.  If we see this same new
148      node again, we won't want to duplicate it a second time.  */
149   if (key != value)
150     id->decl_map->put (value, value);
151 }
152 
153 /* Insert a tree->tree mapping for ID.  This is only used for
154    variables.  */
155 
156 static void
157 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
158 {
159   if (!gimple_in_ssa_p (id->src_cfun))
160     return;
161 
162   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
163     return;
164 
165   if (!target_for_debug_bind (key))
166     return;
167 
168   gcc_assert (TREE_CODE (key) == PARM_DECL);
169   gcc_assert (VAR_P (value));
170 
171   if (!id->debug_map)
172     id->debug_map = new hash_map<tree, tree>;
173 
174   id->debug_map->put (key, value);
175 }
176 
177 /* If nonzero, we're remapping the contents of inlined debug
178    statements.  If negative, an error has occurred, such as a
179    reference to a variable that isn't available in the inlined
180    context.  */
181 static int processing_debug_stmt = 0;
182 
183 /* Construct new SSA name for old NAME. ID is the inline context.  */
184 
185 static tree
186 remap_ssa_name (tree name, copy_body_data *id)
187 {
188   tree new_tree, var;
189   tree *n;
190 
191   gcc_assert (TREE_CODE (name) == SSA_NAME);
192 
193   n = id->decl_map->get (name);
194   if (n)
195     return unshare_expr (*n);
196 
197   if (processing_debug_stmt)
198     {
199       if (SSA_NAME_IS_DEFAULT_DEF (name)
200 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
201 	  && id->entry_bb == NULL
202 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
203 	{
204 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
205 	  gimple *def_temp;
206 	  gimple_stmt_iterator gsi;
207 	  tree val = SSA_NAME_VAR (name);
208 
209 	  n = id->decl_map->get (val);
210 	  if (n != NULL)
211 	    val = *n;
212 	  if (TREE_CODE (val) != PARM_DECL)
213 	    {
214 	      processing_debug_stmt = -1;
215 	      return name;
216 	    }
217 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
218 	  DECL_ARTIFICIAL (vexpr) = 1;
219 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
220 	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
221 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
222 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
223 	  return vexpr;
224 	}
225 
226       processing_debug_stmt = -1;
227       return name;
228     }
229 
230   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
231   var = SSA_NAME_VAR (name);
232   if (!var
233       || (!SSA_NAME_IS_DEFAULT_DEF (name)
234 	  && VAR_P (var)
235 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
236 	  && DECL_ARTIFICIAL (var)
237 	  && DECL_IGNORED_P (var)
238 	  && !DECL_NAME (var)))
239     {
240       struct ptr_info_def *pi;
241       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
242       if (!var && SSA_NAME_IDENTIFIER (name))
243 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
244       insert_decl_map (id, name, new_tree);
245       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
246 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
247       /* At least IPA points-to info can be directly transferred.  */
248       if (id->src_cfun->gimple_df
249 	  && id->src_cfun->gimple_df->ipa_pta
250 	  && POINTER_TYPE_P (TREE_TYPE (name))
251 	  && (pi = SSA_NAME_PTR_INFO (name))
252 	  && !pi->pt.anything)
253 	{
254 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
255 	  new_pi->pt = pi->pt;
256 	}
257       return new_tree;
258     }
259 
260   /* Do not set DEF_STMT yet as statement is not copied yet. We do that
261      in copy_bb.  */
262   new_tree = remap_decl (var, id);
263 
264   /* We might've substituted constant or another SSA_NAME for
265      the variable.
266 
267      Replace the SSA name representing RESULT_DECL by variable during
268      inlining:  this saves us from need to introduce PHI node in a case
269      return value is just partly initialized.  */
270   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
271       && (!SSA_NAME_VAR (name)
272 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
273 	  || !id->transform_return_to_modify))
274     {
275       struct ptr_info_def *pi;
276       new_tree = make_ssa_name (new_tree);
277       insert_decl_map (id, name, new_tree);
278       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
279 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
280       /* At least IPA points-to info can be directly transferred.  */
281       if (id->src_cfun->gimple_df
282 	  && id->src_cfun->gimple_df->ipa_pta
283 	  && POINTER_TYPE_P (TREE_TYPE (name))
284 	  && (pi = SSA_NAME_PTR_INFO (name))
285 	  && !pi->pt.anything)
286 	{
287 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
288 	  new_pi->pt = pi->pt;
289 	}
290       if (SSA_NAME_IS_DEFAULT_DEF (name))
291 	{
292 	  /* By inlining function having uninitialized variable, we might
293 	     extend the lifetime (variable might get reused).  This cause
294 	     ICE in the case we end up extending lifetime of SSA name across
295 	     abnormal edge, but also increase register pressure.
296 
297 	     We simply initialize all uninitialized vars by 0 except
298 	     for case we are inlining to very first BB.  We can avoid
299 	     this for all BBs that are not inside strongly connected
300 	     regions of the CFG, but this is expensive to test.  */
301 	  if (id->entry_bb
302 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
303 	      && (!SSA_NAME_VAR (name)
304 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
305 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
306 					     0)->dest
307 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
308 	    {
309 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
310 	      gimple *init_stmt;
311 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
312 
313 	      init_stmt = gimple_build_assign (new_tree, zero);
314 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
315 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
316 	    }
317 	  else
318 	    {
319 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
320 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
321 	    }
322 	}
323     }
324   else
325     insert_decl_map (id, name, new_tree);
326   return new_tree;
327 }
328 
329 /* Remap DECL during the copying of the BLOCK tree for the function.  */
330 
331 tree
332 remap_decl (tree decl, copy_body_data *id)
333 {
334   tree *n;
335 
336   /* We only remap local variables in the current function.  */
337 
338   /* See if we have remapped this declaration.  */
339 
340   n = id->decl_map->get (decl);
341 
342   if (!n && processing_debug_stmt)
343     {
344       processing_debug_stmt = -1;
345       return decl;
346     }
347 
348   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
349      necessary DECLs have already been remapped and we do not want to duplicate
350      a decl coming from outside of the sequence we are copying.  */
351   if (!n
352       && id->prevent_decl_creation_for_types
353       && id->remapping_type_depth > 0
354       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
355     return decl;
356 
357   /* If we didn't already have an equivalent for this declaration, create one
358      now.  */
359   if (!n)
360     {
361       /* Make a copy of the variable or label.  */
362       tree t = id->copy_decl (decl, id);
363 
364       /* Remember it, so that if we encounter this local entity again
365 	 we can reuse this copy.  Do this early because remap_type may
366 	 need this decl for TYPE_STUB_DECL.  */
367       insert_decl_map (id, decl, t);
368 
369       if (!DECL_P (t))
370 	return t;
371 
372       /* Remap types, if necessary.  */
373       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
374       if (TREE_CODE (t) == TYPE_DECL)
375 	{
376 	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
377 
378 	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
379 	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
380 	     is not set on the TYPE_DECL, for example in LTO mode.  */
381 	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
382 	    {
383 	      tree x = build_variant_type_copy (TREE_TYPE (t));
384 	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
385 	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
386 	      DECL_ORIGINAL_TYPE (t) = x;
387 	    }
388 	}
389 
390       /* Remap sizes as necessary.  */
391       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
392       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
393 
394       /* If fields, do likewise for offset and qualifier.  */
395       if (TREE_CODE (t) == FIELD_DECL)
396 	{
397 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
398 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
399 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
400 	}
401 
402       return t;
403     }
404 
405   if (id->do_not_unshare)
406     return *n;
407   else
408     return unshare_expr (*n);
409 }
410 
411 static tree
412 remap_type_1 (tree type, copy_body_data *id)
413 {
414   tree new_tree, t;
415 
416   /* We do need a copy.  build and register it now.  If this is a pointer or
417      reference type, remap the designated type and make a new pointer or
418      reference type.  */
419   if (TREE_CODE (type) == POINTER_TYPE)
420     {
421       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
422 					 TYPE_MODE (type),
423 					 TYPE_REF_CAN_ALIAS_ALL (type));
424       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
425 	new_tree = build_type_attribute_qual_variant (new_tree,
426 						      TYPE_ATTRIBUTES (type),
427 						      TYPE_QUALS (type));
428       insert_decl_map (id, type, new_tree);
429       return new_tree;
430     }
431   else if (TREE_CODE (type) == REFERENCE_TYPE)
432     {
433       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
434 					    TYPE_MODE (type),
435 					    TYPE_REF_CAN_ALIAS_ALL (type));
436       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
437 	new_tree = build_type_attribute_qual_variant (new_tree,
438 						      TYPE_ATTRIBUTES (type),
439 						      TYPE_QUALS (type));
440       insert_decl_map (id, type, new_tree);
441       return new_tree;
442     }
443   else
444     new_tree = copy_node (type);
445 
446   insert_decl_map (id, type, new_tree);
447 
448   /* This is a new type, not a copy of an old type.  Need to reassociate
449      variants.  We can handle everything except the main variant lazily.  */
450   t = TYPE_MAIN_VARIANT (type);
451   if (type != t)
452     {
453       t = remap_type (t, id);
454       TYPE_MAIN_VARIANT (new_tree) = t;
455       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
456       TYPE_NEXT_VARIANT (t) = new_tree;
457     }
458   else
459     {
460       TYPE_MAIN_VARIANT (new_tree) = new_tree;
461       TYPE_NEXT_VARIANT (new_tree) = NULL;
462     }
463 
464   if (TYPE_STUB_DECL (type))
465     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
466 
467   /* Lazily create pointer and reference types.  */
468   TYPE_POINTER_TO (new_tree) = NULL;
469   TYPE_REFERENCE_TO (new_tree) = NULL;
470 
471   /* Copy all types that may contain references to local variables; be sure to
472      preserve sharing in between type and its main variant when possible.  */
473   switch (TREE_CODE (new_tree))
474     {
475     case INTEGER_TYPE:
476     case REAL_TYPE:
477     case FIXED_POINT_TYPE:
478     case ENUMERAL_TYPE:
479     case BOOLEAN_TYPE:
480       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
481 	{
482 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
483 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
484 
485 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
486 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
487 	}
488       else
489 	{
490 	  t = TYPE_MIN_VALUE (new_tree);
491 	  if (t && TREE_CODE (t) != INTEGER_CST)
492 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
493 
494 	  t = TYPE_MAX_VALUE (new_tree);
495 	  if (t && TREE_CODE (t) != INTEGER_CST)
496 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
497 	}
498       return new_tree;
499 
500     case FUNCTION_TYPE:
501       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
502 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
503 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
504       else
505         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
506       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
508 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
509       else
510         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
511       return new_tree;
512 
513     case ARRAY_TYPE:
514       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
515 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
516 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
517       else
518 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
519 
520       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
521 	{
522 	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
523 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
524 	}
525       else
526 	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
527       break;
528 
529     case RECORD_TYPE:
530     case UNION_TYPE:
531     case QUAL_UNION_TYPE:
532       if (TYPE_MAIN_VARIANT (type) != type
533 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
534 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
535       else
536 	{
537 	  tree f, nf = NULL;
538 
539 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
540 	    {
541 	      t = remap_decl (f, id);
542 	      DECL_CONTEXT (t) = new_tree;
543 	      DECL_CHAIN (t) = nf;
544 	      nf = t;
545 	    }
546 	  TYPE_FIELDS (new_tree) = nreverse (nf);
547 	}
548       break;
549 
550     case OFFSET_TYPE:
551     default:
552       /* Shouldn't have been thought variable sized.  */
553       gcc_unreachable ();
554     }
555 
556   /* All variants of type share the same size, so use the already remaped data.  */
557   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
558     {
559       tree s = TYPE_SIZE (type);
560       tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
561       tree su = TYPE_SIZE_UNIT (type);
562       tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
563       gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
564 			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
565 			   || s == mvs);
566       gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
567 			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
568 			   || su == mvsu);
569       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
570       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
571     }
572   else
573     {
574       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
575       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
576     }
577 
578   return new_tree;
579 }
580 
581 tree
582 remap_type (tree type, copy_body_data *id)
583 {
584   tree *node;
585   tree tmp;
586 
587   if (type == NULL)
588     return type;
589 
590   /* See if we have remapped this type.  */
591   node = id->decl_map->get (type);
592   if (node)
593     return *node;
594 
595   /* The type only needs remapping if it's variably modified.  */
596   if (! variably_modified_type_p (type, id->src_fn))
597     {
598       insert_decl_map (id, type, type);
599       return type;
600     }
601 
602   id->remapping_type_depth++;
603   tmp = remap_type_1 (type, id);
604   id->remapping_type_depth--;
605 
606   return tmp;
607 }
608 
609 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
610 
611 static bool
612 can_be_nonlocal (tree decl, copy_body_data *id)
613 {
614   /* We can not duplicate function decls.  */
615   if (TREE_CODE (decl) == FUNCTION_DECL)
616     return true;
617 
618   /* Local static vars must be non-local or we get multiple declaration
619      problems.  */
620   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
621     return true;
622 
623   return false;
624 }
625 
626 static tree
627 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
628 	     copy_body_data *id)
629 {
630   tree old_var;
631   tree new_decls = NULL_TREE;
632 
633   /* Remap its variables.  */
634   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
635     {
636       tree new_var;
637 
638       if (can_be_nonlocal (old_var, id))
639 	{
640 	  /* We need to add this variable to the local decls as otherwise
641 	     nothing else will do so.  */
642 	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
643 	    add_local_decl (cfun, old_var);
644 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
645 	      && !DECL_IGNORED_P (old_var)
646 	      && nonlocalized_list)
647 	    vec_safe_push (*nonlocalized_list, old_var);
648 	  continue;
649 	}
650 
651       /* Remap the variable.  */
652       new_var = remap_decl (old_var, id);
653 
654       /* If we didn't remap this variable, we can't mess with its
655 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
656 	 already declared somewhere else, so don't declare it here.  */
657 
658       if (new_var == id->retvar)
659 	;
660       else if (!new_var)
661         {
662 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
663 	      && !DECL_IGNORED_P (old_var)
664 	      && nonlocalized_list)
665 	    vec_safe_push (*nonlocalized_list, old_var);
666 	}
667       else
668 	{
669 	  gcc_assert (DECL_P (new_var));
670 	  DECL_CHAIN (new_var) = new_decls;
671 	  new_decls = new_var;
672 
673 	  /* Also copy value-expressions.  */
674 	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
675 	    {
676 	      tree tem = DECL_VALUE_EXPR (new_var);
677 	      bool old_regimplify = id->regimplify;
678 	      id->remapping_type_depth++;
679 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
680 	      id->remapping_type_depth--;
681 	      id->regimplify = old_regimplify;
682 	      SET_DECL_VALUE_EXPR (new_var, tem);
683 	    }
684 	}
685     }
686 
687   return nreverse (new_decls);
688 }
689 
690 /* Copy the BLOCK to contain remapped versions of the variables
691    therein.  And hook the new block into the block-tree.  */
692 
693 static void
694 remap_block (tree *block, copy_body_data *id)
695 {
696   tree old_block;
697   tree new_block;
698 
699   /* Make the new block.  */
700   old_block = *block;
701   new_block = make_node (BLOCK);
702   TREE_USED (new_block) = TREE_USED (old_block);
703   BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
704   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
705   BLOCK_NONLOCALIZED_VARS (new_block)
706     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
707   *block = new_block;
708 
709   /* Remap its variables.  */
710   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
711   					&BLOCK_NONLOCALIZED_VARS (new_block),
712 					id);
713 
714   if (id->transform_lang_insert_block)
715     id->transform_lang_insert_block (new_block);
716 
717   /* Remember the remapped block.  */
718   insert_decl_map (id, old_block, new_block);
719 }
720 
721 /* Copy the whole block tree and root it in id->block.  */
722 static tree
723 remap_blocks (tree block, copy_body_data *id)
724 {
725   tree t;
726   tree new_tree = block;
727 
728   if (!block)
729     return NULL;
730 
731   remap_block (&new_tree, id);
732   gcc_assert (new_tree != block);
733   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
734     prepend_lexical_block (new_tree, remap_blocks (t, id));
735   /* Blocks are in arbitrary order, but make things slightly prettier and do
736      not swap order when producing a copy.  */
737   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
738   return new_tree;
739 }
740 
741 /* Remap the block tree rooted at BLOCK to nothing.  */
742 static void
743 remap_blocks_to_null (tree block, copy_body_data *id)
744 {
745   tree t;
746   insert_decl_map (id, block, NULL_TREE);
747   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
748     remap_blocks_to_null (t, id);
749 }
750 
751 static void
752 copy_statement_list (tree *tp)
753 {
754   tree_stmt_iterator oi, ni;
755   tree new_tree;
756 
757   new_tree = alloc_stmt_list ();
758   ni = tsi_start (new_tree);
759   oi = tsi_start (*tp);
760   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
761   *tp = new_tree;
762 
763   for (; !tsi_end_p (oi); tsi_next (&oi))
764     {
765       tree stmt = tsi_stmt (oi);
766       if (TREE_CODE (stmt) == STATEMENT_LIST)
767 	/* This copy is not redundant; tsi_link_after will smash this
768 	   STATEMENT_LIST into the end of the one we're building, and we
769 	   don't want to do that with the original.  */
770 	copy_statement_list (&stmt);
771       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
772     }
773 }
774 
775 static void
776 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
777 {
778   tree block = BIND_EXPR_BLOCK (*tp);
779   /* Copy (and replace) the statement.  */
780   copy_tree_r (tp, walk_subtrees, NULL);
781   if (block)
782     {
783       remap_block (&block, id);
784       BIND_EXPR_BLOCK (*tp) = block;
785     }
786 
787   if (BIND_EXPR_VARS (*tp))
788     /* This will remap a lot of the same decls again, but this should be
789        harmless.  */
790     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
791 }
792 
793 
794 /* Create a new gimple_seq by remapping all the statements in BODY
795    using the inlining information in ID.  */
796 
797 static gimple_seq
798 remap_gimple_seq (gimple_seq body, copy_body_data *id)
799 {
800   gimple_stmt_iterator si;
801   gimple_seq new_body = NULL;
802 
803   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
804     {
805       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
806       gimple_seq_add_seq (&new_body, new_stmts);
807     }
808 
809   return new_body;
810 }
811 
812 
813 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
814    block using the mapping information in ID.  */
815 
816 static gimple *
817 copy_gimple_bind (gbind *stmt, copy_body_data *id)
818 {
819   gimple *new_bind;
820   tree new_block, new_vars;
821   gimple_seq body, new_body;
822 
823   /* Copy the statement.  Note that we purposely don't use copy_stmt
824      here because we need to remap statements as we copy.  */
825   body = gimple_bind_body (stmt);
826   new_body = remap_gimple_seq (body, id);
827 
828   new_block = gimple_bind_block (stmt);
829   if (new_block)
830     remap_block (&new_block, id);
831 
832   /* This will remap a lot of the same decls again, but this should be
833      harmless.  */
834   new_vars = gimple_bind_vars (stmt);
835   if (new_vars)
836     new_vars = remap_decls (new_vars, NULL, id);
837 
838   new_bind = gimple_build_bind (new_vars, new_body, new_block);
839 
840   return new_bind;
841 }
842 
843 /* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */
844 
845 static bool
846 is_parm (tree decl)
847 {
848   if (TREE_CODE (decl) == SSA_NAME)
849     {
850       decl = SSA_NAME_VAR (decl);
851       if (!decl)
852 	return false;
853     }
854 
855   return (TREE_CODE (decl) == PARM_DECL);
856 }
857 
858 /* Remap the dependence CLIQUE from the source to the destination function
859    as specified in ID.  */
860 
861 static unsigned short
862 remap_dependence_clique (copy_body_data *id, unsigned short clique)
863 {
864   if (clique == 0 || processing_debug_stmt)
865     return 0;
866   if (!id->dependence_map)
867     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
868   bool existed;
869   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
870   if (!existed)
871     newc = ++cfun->last_clique;
872   return newc;
873 }
874 
875 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
876    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
877    WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
878    recursing into the children nodes of *TP.  */
879 
880 static tree
881 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
882 {
883   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
884   copy_body_data *id = (copy_body_data *) wi_p->info;
885   tree fn = id->src_fn;
886 
887   /* For recursive invocations this is no longer the LHS itself.  */
888   bool is_lhs = wi_p->is_lhs;
889   wi_p->is_lhs = false;
890 
891   if (TREE_CODE (*tp) == SSA_NAME)
892     {
893       *tp = remap_ssa_name (*tp, id);
894       *walk_subtrees = 0;
895       if (is_lhs)
896 	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
897       return NULL;
898     }
899   else if (auto_var_in_fn_p (*tp, fn))
900     {
901       /* Local variables and labels need to be replaced by equivalent
902 	 variables.  We don't want to copy static variables; there's
903 	 only one of those, no matter how many times we inline the
904 	 containing function.  Similarly for globals from an outer
905 	 function.  */
906       tree new_decl;
907 
908       /* Remap the declaration.  */
909       new_decl = remap_decl (*tp, id);
910       gcc_assert (new_decl);
911       /* Replace this variable with the copy.  */
912       STRIP_TYPE_NOPS (new_decl);
913       /* ???  The C++ frontend uses void * pointer zero to initialize
914          any other type.  This confuses the middle-end type verification.
915 	 As cloned bodies do not go through gimplification again the fixup
916 	 there doesn't trigger.  */
917       if (TREE_CODE (new_decl) == INTEGER_CST
918 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
919 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
920       *tp = new_decl;
921       *walk_subtrees = 0;
922     }
923   else if (TREE_CODE (*tp) == STATEMENT_LIST)
924     gcc_unreachable ();
925   else if (TREE_CODE (*tp) == SAVE_EXPR)
926     gcc_unreachable ();
927   else if (TREE_CODE (*tp) == LABEL_DECL
928 	   && (!DECL_CONTEXT (*tp)
929 	       || decl_function_context (*tp) == id->src_fn))
930     /* These may need to be remapped for EH handling.  */
931     *tp = remap_decl (*tp, id);
932   else if (TREE_CODE (*tp) == FIELD_DECL)
933     {
934       /* If the enclosing record type is variably_modified_type_p, the field
935 	 has already been remapped.  Otherwise, it need not be.  */
936       tree *n = id->decl_map->get (*tp);
937       if (n)
938 	*tp = *n;
939       *walk_subtrees = 0;
940     }
941   else if (TYPE_P (*tp))
942     /* Types may need remapping as well.  */
943     *tp = remap_type (*tp, id);
944   else if (CONSTANT_CLASS_P (*tp))
945     {
946       /* If this is a constant, we have to copy the node iff the type
947 	 will be remapped.  copy_tree_r will not copy a constant.  */
948       tree new_type = remap_type (TREE_TYPE (*tp), id);
949 
950       if (new_type == TREE_TYPE (*tp))
951 	*walk_subtrees = 0;
952 
953       else if (TREE_CODE (*tp) == INTEGER_CST)
954 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
955       else
956 	{
957 	  *tp = copy_node (*tp);
958 	  TREE_TYPE (*tp) = new_type;
959 	}
960     }
961   else
962     {
963       /* Otherwise, just copy the node.  Note that copy_tree_r already
964 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
965 
966       if (TREE_CODE (*tp) == MEM_REF)
967 	{
968 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
969 	     that can happen when a pointer argument is an ADDR_EXPR.
970 	     Recurse here manually to allow that.  */
971 	  tree ptr = TREE_OPERAND (*tp, 0);
972 	  tree type = remap_type (TREE_TYPE (*tp), id);
973 	  tree old = *tp;
974 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
975 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
976 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
977 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
978 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
979 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
980 	    {
981 	      MR_DEPENDENCE_CLIQUE (*tp)
982 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
983 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
984 	    }
985 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
986 	     remapped a parameter as the property might be valid only
987 	     for the parameter itself.  */
988 	  if (TREE_THIS_NOTRAP (old)
989 	      && (!is_parm (TREE_OPERAND (old, 0))
990 		  || (!id->transform_parameter && is_parm (ptr))))
991 	    TREE_THIS_NOTRAP (*tp) = 1;
992 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
993 	  *walk_subtrees = 0;
994 	  return NULL;
995 	}
996 
997       /* Here is the "usual case".  Copy this tree node, and then
998 	 tweak some special cases.  */
999       copy_tree_r (tp, walk_subtrees, NULL);
1000 
1001       if (TREE_CODE (*tp) != OMP_CLAUSE)
1002 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1003 
1004       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1005 	{
1006 	  /* The copied TARGET_EXPR has never been expanded, even if the
1007 	     original node was expanded already.  */
1008 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1009 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1010 	}
1011       else if (TREE_CODE (*tp) == ADDR_EXPR)
1012 	{
1013 	  /* Variable substitution need not be simple.  In particular,
1014 	     the MEM_REF substitution above.  Make sure that
1015 	     TREE_CONSTANT and friends are up-to-date.  */
1016 	  int invariant = is_gimple_min_invariant (*tp);
1017 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1018 	  recompute_tree_invariant_for_addr_expr (*tp);
1019 
1020 	  /* If this used to be invariant, but is not any longer,
1021 	     then regimplification is probably needed.  */
1022 	  if (invariant && !is_gimple_min_invariant (*tp))
1023 	    id->regimplify = true;
1024 
1025 	  *walk_subtrees = 0;
1026 	}
1027     }
1028 
1029   /* Update the TREE_BLOCK for the cloned expr.  */
1030   if (EXPR_P (*tp))
1031     {
1032       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1033       tree old_block = TREE_BLOCK (*tp);
1034       if (old_block)
1035 	{
1036 	  tree *n;
1037 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1038 	  if (n)
1039 	    new_block = *n;
1040 	}
1041       TREE_SET_BLOCK (*tp, new_block);
1042     }
1043 
1044   /* Keep iterating.  */
1045   return NULL_TREE;
1046 }
1047 
1048 
1049 /* Called from copy_body_id via walk_tree.  DATA is really a
1050    `copy_body_data *'.  */
1051 
1052 tree
1053 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1054 {
1055   copy_body_data *id = (copy_body_data *) data;
1056   tree fn = id->src_fn;
1057   tree new_block;
1058 
1059   /* Begin by recognizing trees that we'll completely rewrite for the
1060      inlining context.  Our output for these trees is completely
1061      different from out input (e.g. RETURN_EXPR is deleted, and morphs
1062      into an edge).  Further down, we'll handle trees that get
1063      duplicated and/or tweaked.  */
1064 
1065   /* When requested, RETURN_EXPRs should be transformed to just the
1066      contained MODIFY_EXPR.  The branch semantics of the return will
1067      be handled elsewhere by manipulating the CFG rather than a statement.  */
1068   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1069     {
1070       tree assignment = TREE_OPERAND (*tp, 0);
1071 
1072       /* If we're returning something, just turn that into an
1073 	 assignment into the equivalent of the original RESULT_DECL.
1074 	 If the "assignment" is just the result decl, the result
1075 	 decl has already been set (e.g. a recent "foo (&result_decl,
1076 	 ...)"); just toss the entire RETURN_EXPR.  */
1077       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1078 	{
1079 	  /* Replace the RETURN_EXPR with (a copy of) the
1080 	     MODIFY_EXPR hanging underneath.  */
1081 	  *tp = copy_node (assignment);
1082 	}
1083       else /* Else the RETURN_EXPR returns no value.  */
1084 	{
1085 	  *tp = NULL;
1086 	  return (tree) (void *)1;
1087 	}
1088     }
1089   else if (TREE_CODE (*tp) == SSA_NAME)
1090     {
1091       *tp = remap_ssa_name (*tp, id);
1092       *walk_subtrees = 0;
1093       return NULL;
1094     }
1095 
1096   /* Local variables and labels need to be replaced by equivalent
1097      variables.  We don't want to copy static variables; there's only
1098      one of those, no matter how many times we inline the containing
1099      function.  Similarly for globals from an outer function.  */
1100   else if (auto_var_in_fn_p (*tp, fn))
1101     {
1102       tree new_decl;
1103 
1104       /* Remap the declaration.  */
1105       new_decl = remap_decl (*tp, id);
1106       gcc_assert (new_decl);
1107       /* Replace this variable with the copy.  */
1108       STRIP_TYPE_NOPS (new_decl);
1109       *tp = new_decl;
1110       *walk_subtrees = 0;
1111     }
1112   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1113     copy_statement_list (tp);
1114   else if (TREE_CODE (*tp) == SAVE_EXPR
1115 	   || TREE_CODE (*tp) == TARGET_EXPR)
1116     remap_save_expr (tp, id->decl_map, walk_subtrees);
1117   else if (TREE_CODE (*tp) == LABEL_DECL
1118 	   && (! DECL_CONTEXT (*tp)
1119 	       || decl_function_context (*tp) == id->src_fn))
1120     /* These may need to be remapped for EH handling.  */
1121     *tp = remap_decl (*tp, id);
1122   else if (TREE_CODE (*tp) == BIND_EXPR)
1123     copy_bind_expr (tp, walk_subtrees, id);
1124   /* Types may need remapping as well.  */
1125   else if (TYPE_P (*tp))
1126     *tp = remap_type (*tp, id);
1127 
1128   /* If this is a constant, we have to copy the node iff the type will be
1129      remapped.  copy_tree_r will not copy a constant.  */
1130   else if (CONSTANT_CLASS_P (*tp))
1131     {
1132       tree new_type = remap_type (TREE_TYPE (*tp), id);
1133 
1134       if (new_type == TREE_TYPE (*tp))
1135 	*walk_subtrees = 0;
1136 
1137       else if (TREE_CODE (*tp) == INTEGER_CST)
1138 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1139       else
1140 	{
1141 	  *tp = copy_node (*tp);
1142 	  TREE_TYPE (*tp) = new_type;
1143 	}
1144     }
1145 
1146   /* Otherwise, just copy the node.  Note that copy_tree_r already
1147      knows not to copy VAR_DECLs, etc., so this is safe.  */
1148   else
1149     {
1150       /* Here we handle trees that are not completely rewritten.
1151 	 First we detect some inlining-induced bogosities for
1152 	 discarding.  */
1153       if (TREE_CODE (*tp) == MODIFY_EXPR
1154 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1155 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1156 	{
1157 	  /* Some assignments VAR = VAR; don't generate any rtl code
1158 	     and thus don't count as variable modification.  Avoid
1159 	     keeping bogosities like 0 = 0.  */
1160 	  tree decl = TREE_OPERAND (*tp, 0), value;
1161 	  tree *n;
1162 
1163 	  n = id->decl_map->get (decl);
1164 	  if (n)
1165 	    {
1166 	      value = *n;
1167 	      STRIP_TYPE_NOPS (value);
1168 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1169 		{
1170 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1171 		  return copy_tree_body_r (tp, walk_subtrees, data);
1172 		}
1173 	    }
1174 	}
1175       else if (TREE_CODE (*tp) == INDIRECT_REF)
1176 	{
1177 	  /* Get rid of *& from inline substitutions that can happen when a
1178 	     pointer argument is an ADDR_EXPR.  */
1179 	  tree decl = TREE_OPERAND (*tp, 0);
1180 	  tree *n = id->decl_map->get (decl);
1181 	  if (n)
1182 	    {
1183 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1184 	         it manually here as we'll eventually get ADDR_EXPRs
1185 		 which lie about their types pointed to.  In this case
1186 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1187 		 but we absolutely rely on that.  As fold_indirect_ref
1188 	         does other useful transformations, try that first, though.  */
1189 	      tree type = TREE_TYPE (*tp);
1190 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1191 	      tree old = *tp;
1192 	      *tp = gimple_fold_indirect_ref (ptr);
1193 	      if (! *tp)
1194 	        {
1195 		  type = remap_type (type, id);
1196 		  if (TREE_CODE (ptr) == ADDR_EXPR)
1197 		    {
1198 		      *tp
1199 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1200 		      /* ???  We should either assert here or build
1201 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1202 			 incompatible types to our IL.  */
1203 		      if (! *tp)
1204 			*tp = TREE_OPERAND (ptr, 0);
1205 		    }
1206 	          else
1207 		    {
1208 	              *tp = build1 (INDIRECT_REF, type, ptr);
1209 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1210 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1211 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1212 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1213 			 have remapped a parameter as the property might be
1214 			 valid only for the parameter itself.  */
1215 		      if (TREE_THIS_NOTRAP (old)
1216 			  && (!is_parm (TREE_OPERAND (old, 0))
1217 			      || (!id->transform_parameter && is_parm (ptr))))
1218 		        TREE_THIS_NOTRAP (*tp) = 1;
1219 		    }
1220 		}
1221 	      *walk_subtrees = 0;
1222 	      return NULL;
1223 	    }
1224 	}
1225       else if (TREE_CODE (*tp) == MEM_REF)
1226 	{
1227 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1228 	     that can happen when a pointer argument is an ADDR_EXPR.
1229 	     Recurse here manually to allow that.  */
1230 	  tree ptr = TREE_OPERAND (*tp, 0);
1231 	  tree type = remap_type (TREE_TYPE (*tp), id);
1232 	  tree old = *tp;
1233 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1234 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1235 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1236 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1237 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1238 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1239 	    {
1240 	      MR_DEPENDENCE_CLIQUE (*tp)
1241 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1242 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1243 	    }
1244 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1245 	     remapped a parameter as the property might be valid only
1246 	     for the parameter itself.  */
1247 	  if (TREE_THIS_NOTRAP (old)
1248 	      && (!is_parm (TREE_OPERAND (old, 0))
1249 		  || (!id->transform_parameter && is_parm (ptr))))
1250 	    TREE_THIS_NOTRAP (*tp) = 1;
1251 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1252 	  *walk_subtrees = 0;
1253 	  return NULL;
1254 	}
1255 
1256       /* Here is the "usual case".  Copy this tree node, and then
1257 	 tweak some special cases.  */
1258       copy_tree_r (tp, walk_subtrees, NULL);
1259 
1260       /* If EXPR has block defined, map it to newly constructed block.
1261          When inlining we want EXPRs without block appear in the block
1262 	 of function call if we are not remapping a type.  */
1263       if (EXPR_P (*tp))
1264 	{
1265 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1266 	  if (TREE_BLOCK (*tp))
1267 	    {
1268 	      tree *n;
1269 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1270 	      if (n)
1271 		new_block = *n;
1272 	    }
1273 	  TREE_SET_BLOCK (*tp, new_block);
1274 	}
1275 
1276       if (TREE_CODE (*tp) != OMP_CLAUSE)
1277 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1278 
1279       /* The copied TARGET_EXPR has never been expanded, even if the
1280 	 original node was expanded already.  */
1281       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1282 	{
1283 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1284 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1285 	}
1286 
1287       /* Variable substitution need not be simple.  In particular, the
1288 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1289 	 and friends are up-to-date.  */
1290       else if (TREE_CODE (*tp) == ADDR_EXPR)
1291 	{
1292 	  int invariant = is_gimple_min_invariant (*tp);
1293 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1294 
1295 	  /* Handle the case where we substituted an INDIRECT_REF
1296 	     into the operand of the ADDR_EXPR.  */
1297 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1298 	    {
1299 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1300 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1301 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1302 	      *tp = t;
1303 	    }
1304 	  else
1305 	    recompute_tree_invariant_for_addr_expr (*tp);
1306 
1307 	  /* If this used to be invariant, but is not any longer,
1308 	     then regimplification is probably needed.  */
1309 	  if (invariant && !is_gimple_min_invariant (*tp))
1310 	    id->regimplify = true;
1311 
1312 	  *walk_subtrees = 0;
1313 	}
1314     }
1315 
1316   /* Keep iterating.  */
1317   return NULL_TREE;
1318 }
1319 
1320 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1321    source function, map that to the duplicate EH region number in
1322    the destination function.  */
1323 
1324 static int
1325 remap_eh_region_nr (int old_nr, copy_body_data *id)
1326 {
1327   eh_region old_r, new_r;
1328 
1329   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1330   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1331 
1332   return new_r->index;
1333 }
1334 
1335 /* Similar, but operate on INTEGER_CSTs.  */
1336 
1337 static tree
1338 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1339 {
1340   int old_nr, new_nr;
1341 
1342   old_nr = tree_to_shwi (old_t_nr);
1343   new_nr = remap_eh_region_nr (old_nr, id);
1344 
1345   return build_int_cst (integer_type_node, new_nr);
1346 }
1347 
1348 /* Helper for copy_bb.  Remap statement STMT using the inlining
1349    information in ID.  Return the new statement copy.  */
1350 
1351 static gimple_seq
1352 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1353 {
1354   gimple *copy = NULL;
1355   struct walk_stmt_info wi;
1356   bool skip_first = false;
1357   gimple_seq stmts = NULL;
1358 
1359   if (is_gimple_debug (stmt)
1360       && (gimple_debug_nonbind_marker_p (stmt)
1361 	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1362 	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1363     return stmts;
1364 
1365   /* Begin by recognizing trees that we'll completely rewrite for the
1366      inlining context.  Our output for these trees is completely
1367      different from out input (e.g. RETURN_EXPR is deleted, and morphs
1368      into an edge).  Further down, we'll handle trees that get
1369      duplicated and/or tweaked.  */
1370 
1371   /* When requested, GIMPLE_RETURNs should be transformed to just the
1372      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1373      be handled elsewhere by manipulating the CFG rather than the
1374      statement.  */
1375   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1376     {
1377       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1378       tree retbnd = gimple_return_retbnd (stmt);
1379       tree bndslot = id->retbnd;
1380 
1381       if (retbnd && bndslot)
1382 	{
1383 	  gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
1384 	  memset (&wi, 0, sizeof (wi));
1385 	  wi.info = id;
1386 	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1387 	  gimple_seq_add_stmt (&stmts, bndcopy);
1388 	}
1389 
1390       /* If we're returning something, just turn that into an
1391 	 assignment into the equivalent of the original RESULT_DECL.
1392 	 If RETVAL is just the result decl, the result decl has
1393 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1394 	 just toss the entire GIMPLE_RETURN.  */
1395       if (retval
1396 	  && (TREE_CODE (retval) != RESULT_DECL
1397 	      && (TREE_CODE (retval) != SSA_NAME
1398 		  || ! SSA_NAME_VAR (retval)
1399 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1400         {
1401 	  copy = gimple_build_assign (id->do_not_unshare
1402 				      ? id->retvar : unshare_expr (id->retvar),
1403 				      retval);
1404 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1405 	  skip_first = true;
1406 
1407 	  /* We need to copy bounds if return structure with pointers into
1408 	     instrumented function.  */
1409 	  if (chkp_function_instrumented_p (id->dst_fn)
1410 	      && !bndslot
1411 	      && !BOUNDED_P (id->retvar)
1412 	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1413 	    id->assign_stmts.safe_push (copy);
1414 
1415 	}
1416       else
1417 	return stmts;
1418     }
1419   else if (gimple_has_substatements (stmt))
1420     {
1421       gimple_seq s1, s2;
1422 
1423       /* When cloning bodies from the C++ front end, we will be handed bodies
1424 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1425 	 have embedded statements.  */
1426       switch (gimple_code (stmt))
1427 	{
1428 	case GIMPLE_BIND:
1429 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1430 	  break;
1431 
1432 	case GIMPLE_CATCH:
1433 	  {
1434 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1435 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1436 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1437 	  }
1438 	  break;
1439 
1440 	case GIMPLE_EH_FILTER:
1441 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1442 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1443 	  break;
1444 
1445 	case GIMPLE_TRY:
1446 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1447 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1448 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1449 	  break;
1450 
1451 	case GIMPLE_WITH_CLEANUP_EXPR:
1452 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1453 	  copy = gimple_build_wce (s1);
1454 	  break;
1455 
1456 	case GIMPLE_OMP_PARALLEL:
1457 	  {
1458 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1459 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1460 	    copy = gimple_build_omp_parallel
1461 	             (s1,
1462 		      gimple_omp_parallel_clauses (omp_par_stmt),
1463 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1464 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1465 	  }
1466 	  break;
1467 
1468 	case GIMPLE_OMP_TASK:
1469 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1470 	  copy = gimple_build_omp_task
1471 	           (s1,
1472 		    gimple_omp_task_clauses (stmt),
1473 		    gimple_omp_task_child_fn (stmt),
1474 		    gimple_omp_task_data_arg (stmt),
1475 		    gimple_omp_task_copy_fn (stmt),
1476 		    gimple_omp_task_arg_size (stmt),
1477 		    gimple_omp_task_arg_align (stmt));
1478 	  break;
1479 
1480 	case GIMPLE_OMP_FOR:
1481 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1482 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1483 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1484 				       gimple_omp_for_clauses (stmt),
1485 				       gimple_omp_for_collapse (stmt), s2);
1486 	  {
1487 	    size_t i;
1488 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1489 	      {
1490 		gimple_omp_for_set_index (copy, i,
1491 					  gimple_omp_for_index (stmt, i));
1492 		gimple_omp_for_set_initial (copy, i,
1493 					    gimple_omp_for_initial (stmt, i));
1494 		gimple_omp_for_set_final (copy, i,
1495 					  gimple_omp_for_final (stmt, i));
1496 		gimple_omp_for_set_incr (copy, i,
1497 					 gimple_omp_for_incr (stmt, i));
1498 		gimple_omp_for_set_cond (copy, i,
1499 					 gimple_omp_for_cond (stmt, i));
1500 	      }
1501 	  }
1502 	  break;
1503 
1504 	case GIMPLE_OMP_MASTER:
1505 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1506 	  copy = gimple_build_omp_master (s1);
1507 	  break;
1508 
1509 	case GIMPLE_OMP_TASKGROUP:
1510 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1511 	  copy = gimple_build_omp_taskgroup (s1);
1512 	  break;
1513 
1514 	case GIMPLE_OMP_ORDERED:
1515 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1516 	  copy = gimple_build_omp_ordered
1517 		   (s1,
1518 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1519 	  break;
1520 
1521 	case GIMPLE_OMP_SECTION:
1522 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1523 	  copy = gimple_build_omp_section (s1);
1524 	  break;
1525 
1526 	case GIMPLE_OMP_SECTIONS:
1527 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1528 	  copy = gimple_build_omp_sections
1529 	           (s1, gimple_omp_sections_clauses (stmt));
1530 	  break;
1531 
1532 	case GIMPLE_OMP_SINGLE:
1533 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1534 	  copy = gimple_build_omp_single
1535 	           (s1, gimple_omp_single_clauses (stmt));
1536 	  break;
1537 
1538 	case GIMPLE_OMP_TARGET:
1539 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1540 	  copy = gimple_build_omp_target
1541 		   (s1, gimple_omp_target_kind (stmt),
1542 		    gimple_omp_target_clauses (stmt));
1543 	  break;
1544 
1545 	case GIMPLE_OMP_TEAMS:
1546 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1547 	  copy = gimple_build_omp_teams
1548 		   (s1, gimple_omp_teams_clauses (stmt));
1549 	  break;
1550 
1551 	case GIMPLE_OMP_CRITICAL:
1552 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1553 	  copy = gimple_build_omp_critical (s1,
1554 					    gimple_omp_critical_name
1555 					      (as_a <gomp_critical *> (stmt)),
1556 					    gimple_omp_critical_clauses
1557 					      (as_a <gomp_critical *> (stmt)));
1558 	  break;
1559 
1560 	case GIMPLE_TRANSACTION:
1561 	  {
1562 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1563 	    gtransaction *new_trans_stmt;
1564 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1565 				   id);
1566 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1567 	    gimple_transaction_set_subcode (new_trans_stmt,
1568 	      gimple_transaction_subcode (old_trans_stmt));
1569 	    gimple_transaction_set_label_norm (new_trans_stmt,
1570 	      gimple_transaction_label_norm (old_trans_stmt));
1571 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1572 	      gimple_transaction_label_uninst (old_trans_stmt));
1573 	    gimple_transaction_set_label_over (new_trans_stmt,
1574 	      gimple_transaction_label_over (old_trans_stmt));
1575 	  }
1576 	  break;
1577 
1578 	default:
1579 	  gcc_unreachable ();
1580 	}
1581     }
1582   else
1583     {
1584       if (gimple_assign_copy_p (stmt)
1585 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1586 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1587 	{
1588 	  /* Here we handle statements that are not completely rewritten.
1589 	     First we detect some inlining-induced bogosities for
1590 	     discarding.  */
1591 
1592 	  /* Some assignments VAR = VAR; don't generate any rtl code
1593 	     and thus don't count as variable modification.  Avoid
1594 	     keeping bogosities like 0 = 0.  */
1595 	  tree decl = gimple_assign_lhs (stmt), value;
1596 	  tree *n;
1597 
1598 	  n = id->decl_map->get (decl);
1599 	  if (n)
1600 	    {
1601 	      value = *n;
1602 	      STRIP_TYPE_NOPS (value);
1603 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1604 		return NULL;
1605 	    }
1606 	}
1607 
1608       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1609 	 in a block that we aren't copying during tree_function_versioning,
1610 	 just drop the clobber stmt.  */
1611       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1612 	{
1613 	  tree lhs = gimple_assign_lhs (stmt);
1614 	  if (TREE_CODE (lhs) == MEM_REF
1615 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1616 	    {
1617 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1618 	      if (gimple_bb (def_stmt)
1619 		  && !bitmap_bit_p (id->blocks_to_copy,
1620 				    gimple_bb (def_stmt)->index))
1621 		return NULL;
1622 	    }
1623 	}
1624 
1625       if (gimple_debug_bind_p (stmt))
1626 	{
1627 	  gdebug *copy
1628 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1629 				       gimple_debug_bind_get_value (stmt),
1630 				       stmt);
1631 	  id->debug_stmts.safe_push (copy);
1632 	  gimple_seq_add_stmt (&stmts, copy);
1633 	  return stmts;
1634 	}
1635       if (gimple_debug_source_bind_p (stmt))
1636 	{
1637 	  gdebug *copy = gimple_build_debug_source_bind
1638 	                   (gimple_debug_source_bind_get_var (stmt),
1639 			    gimple_debug_source_bind_get_value (stmt),
1640 			    stmt);
1641 	  id->debug_stmts.safe_push (copy);
1642 	  gimple_seq_add_stmt (&stmts, copy);
1643 	  return stmts;
1644 	}
1645       if (gimple_debug_nonbind_marker_p (stmt))
1646 	{
1647 	  /* If the inlined function has too many debug markers,
1648 	     don't copy them.  */
1649 	  if (id->src_cfun->debug_marker_count
1650 	      > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1651 	    return stmts;
1652 
1653 	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1654 	  id->debug_stmts.safe_push (copy);
1655 	  gimple_seq_add_stmt (&stmts, copy);
1656 	  return stmts;
1657 	}
1658       gcc_checking_assert (!is_gimple_debug (stmt));
1659 
1660       /* Create a new deep copy of the statement.  */
1661       copy = gimple_copy (stmt);
1662 
1663       /* Clear flags that need revisiting.  */
1664       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1665         {
1666 	  if (gimple_call_tail_p (call_stmt))
1667 	    gimple_call_set_tail (call_stmt, false);
1668 	  if (gimple_call_from_thunk_p (call_stmt))
1669 	    gimple_call_set_from_thunk (call_stmt, false);
1670 	  if (gimple_call_internal_p (call_stmt))
1671 	    switch (gimple_call_internal_fn (call_stmt))
1672 	      {
1673 	      case IFN_GOMP_SIMD_LANE:
1674 	      case IFN_GOMP_SIMD_VF:
1675 	      case IFN_GOMP_SIMD_LAST_LANE:
1676 	      case IFN_GOMP_SIMD_ORDERED_START:
1677 	      case IFN_GOMP_SIMD_ORDERED_END:
1678 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1679 	        break;
1680 	      default:
1681 		break;
1682 	      }
1683 	}
1684 
1685       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1686 	 RESX and EH_DISPATCH.  */
1687       if (id->eh_map)
1688 	switch (gimple_code (copy))
1689 	  {
1690 	  case GIMPLE_CALL:
1691 	    {
1692 	      tree r, fndecl = gimple_call_fndecl (copy);
1693 	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1694 		switch (DECL_FUNCTION_CODE (fndecl))
1695 		  {
1696 		  case BUILT_IN_EH_COPY_VALUES:
1697 		    r = gimple_call_arg (copy, 1);
1698 		    r = remap_eh_region_tree_nr (r, id);
1699 		    gimple_call_set_arg (copy, 1, r);
1700 		    /* FALLTHRU */
1701 
1702 		  case BUILT_IN_EH_POINTER:
1703 		  case BUILT_IN_EH_FILTER:
1704 		    r = gimple_call_arg (copy, 0);
1705 		    r = remap_eh_region_tree_nr (r, id);
1706 		    gimple_call_set_arg (copy, 0, r);
1707 		    break;
1708 
1709 		  default:
1710 		    break;
1711 		  }
1712 
1713 	      /* Reset alias info if we didn't apply measures to
1714 		 keep it valid over inlining by setting DECL_PT_UID.  */
1715 	      if (!id->src_cfun->gimple_df
1716 		  || !id->src_cfun->gimple_df->ipa_pta)
1717 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1718 	    }
1719 	    break;
1720 
1721 	  case GIMPLE_RESX:
1722 	    {
1723 	      gresx *resx_stmt = as_a <gresx *> (copy);
1724 	      int r = gimple_resx_region (resx_stmt);
1725 	      r = remap_eh_region_nr (r, id);
1726 	      gimple_resx_set_region (resx_stmt, r);
1727 	    }
1728 	    break;
1729 
1730 	  case GIMPLE_EH_DISPATCH:
1731 	    {
1732 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1733 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1734 	      r = remap_eh_region_nr (r, id);
1735 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1736 	    }
1737 	    break;
1738 
1739 	  default:
1740 	    break;
1741 	  }
1742     }
1743 
1744   /* If STMT has a block defined, map it to the newly constructed
1745      block.  */
1746   if (gimple_block (copy))
1747     {
1748       tree *n;
1749       n = id->decl_map->get (gimple_block (copy));
1750       gcc_assert (n);
1751       gimple_set_block (copy, *n);
1752     }
1753 
1754   if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy)
1755       || gimple_debug_nonbind_marker_p (copy))
1756     {
1757       gimple_seq_add_stmt (&stmts, copy);
1758       return stmts;
1759     }
1760 
1761   /* Remap all the operands in COPY.  */
1762   memset (&wi, 0, sizeof (wi));
1763   wi.info = id;
1764   if (skip_first)
1765     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1766   else
1767     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1768 
1769   /* Clear the copied virtual operands.  We are not remapping them here
1770      but are going to recreate them from scratch.  */
1771   if (gimple_has_mem_ops (copy))
1772     {
1773       gimple_set_vdef (copy, NULL_TREE);
1774       gimple_set_vuse (copy, NULL_TREE);
1775     }
1776 
1777   gimple_seq_add_stmt (&stmts, copy);
1778   return stmts;
1779 }
1780 
1781 
1782 /* Copy basic block, scale profile accordingly.  Edges will be taken care of
1783    later  */
1784 
1785 static basic_block
1786 copy_bb (copy_body_data *id, basic_block bb,
1787          profile_count num, profile_count den)
1788 {
1789   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1790   basic_block copy_basic_block;
1791   tree decl;
1792   basic_block prev;
1793 
1794   profile_count::adjust_for_ipa_scaling (&num, &den);
1795 
1796   /* Search for previous copied basic block.  */
1797   prev = bb->prev_bb;
1798   while (!prev->aux)
1799     prev = prev->prev_bb;
1800 
1801   /* create_basic_block() will append every new block to
1802      basic_block_info automatically.  */
1803   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1804   copy_basic_block->count = bb->count.apply_scale (num, den);
1805 
1806   copy_gsi = gsi_start_bb (copy_basic_block);
1807 
1808   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1809     {
1810       gimple_seq stmts;
1811       gimple *stmt = gsi_stmt (gsi);
1812       gimple *orig_stmt = stmt;
1813       gimple_stmt_iterator stmts_gsi;
1814       bool stmt_added = false;
1815 
1816       id->regimplify = false;
1817       stmts = remap_gimple_stmt (stmt, id);
1818 
1819       if (gimple_seq_empty_p (stmts))
1820 	continue;
1821 
1822       seq_gsi = copy_gsi;
1823 
1824       for (stmts_gsi = gsi_start (stmts);
1825 	   !gsi_end_p (stmts_gsi); )
1826 	{
1827 	  stmt = gsi_stmt (stmts_gsi);
1828 
1829 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1830 	  gsi_next (&stmts_gsi);
1831 
1832 	  if (gimple_nop_p (stmt))
1833 	      continue;
1834 
1835 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1836 					    orig_stmt);
1837 
1838 	  /* With return slot optimization we can end up with
1839 	     non-gimple (foo *)&this->m, fix that here.  */
1840 	  if (is_gimple_assign (stmt)
1841 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1842 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1843 	    {
1844 	      tree new_rhs;
1845 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1846 						  gimple_assign_rhs1 (stmt),
1847 						  true, NULL, false,
1848 						  GSI_CONTINUE_LINKING);
1849 	      gimple_assign_set_rhs1 (stmt, new_rhs);
1850 	      id->regimplify = false;
1851 	    }
1852 
1853 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1854 
1855 	  if (id->regimplify)
1856 	    gimple_regimplify_operands (stmt, &seq_gsi);
1857 
1858 	  stmt_added = true;
1859 	}
1860 
1861       if (!stmt_added)
1862 	continue;
1863 
1864       /* If copy_basic_block has been empty at the start of this iteration,
1865 	 call gsi_start_bb again to get at the newly added statements.  */
1866       if (gsi_end_p (copy_gsi))
1867 	copy_gsi = gsi_start_bb (copy_basic_block);
1868       else
1869 	gsi_next (&copy_gsi);
1870 
1871       /* Process the new statement.  The call to gimple_regimplify_operands
1872 	 possibly turned the statement into multiple statements, we
1873 	 need to process all of them.  */
1874       do
1875 	{
1876 	  tree fn;
1877 	  gcall *call_stmt;
1878 
1879 	  stmt = gsi_stmt (copy_gsi);
1880 	  call_stmt = dyn_cast <gcall *> (stmt);
1881 	  if (call_stmt
1882 	      && gimple_call_va_arg_pack_p (call_stmt)
1883 	      && id->call_stmt
1884 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
1885 	    {
1886 	      /* __builtin_va_arg_pack () should be replaced by
1887 		 all arguments corresponding to ... in the caller.  */
1888 	      tree p;
1889 	      gcall *new_call;
1890 	      vec<tree> argarray;
1891 	      size_t nargs = gimple_call_num_args (id->call_stmt);
1892 	      size_t n, i, nargs_to_copy;
1893 	      bool remove_bounds = false;
1894 
1895 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1896 		nargs--;
1897 
1898 	      /* Bounds should be removed from arg pack in case
1899 		 we handle not instrumented call in instrumented
1900 		 function.  */
1901 	      nargs_to_copy = nargs;
1902 	      if (gimple_call_with_bounds_p (id->call_stmt)
1903 		  && !gimple_call_with_bounds_p (stmt))
1904 		{
1905 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1906 		       i < gimple_call_num_args (id->call_stmt);
1907 		       i++)
1908 		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1909 		      nargs_to_copy--;
1910 		  remove_bounds = true;
1911 		}
1912 
1913 	      /* Create the new array of arguments.  */
1914 	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
1915 	      argarray.create (n);
1916 	      argarray.safe_grow_cleared (n);
1917 
1918 	      /* Copy all the arguments before '...'  */
1919 	      memcpy (argarray.address (),
1920 		      gimple_call_arg_ptr (call_stmt, 0),
1921 		      gimple_call_num_args (call_stmt) * sizeof (tree));
1922 
1923 	      if (remove_bounds)
1924 		{
1925 		  /* Append the rest of arguments removing bounds.  */
1926 		  unsigned cur = gimple_call_num_args (call_stmt);
1927 		  i = gimple_call_num_args (id->call_stmt) - nargs;
1928 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
1929 		       i < gimple_call_num_args (id->call_stmt);
1930 		       i++)
1931 		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1932 		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1933 		  gcc_assert (cur == n);
1934 		}
1935 	      else
1936 		{
1937 		  /* Append the arguments passed in '...'  */
1938 		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1939 			  gimple_call_arg_ptr (id->call_stmt, 0)
1940 			  + (gimple_call_num_args (id->call_stmt) - nargs),
1941 			  nargs * sizeof (tree));
1942 		}
1943 
1944 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1945 						argarray);
1946 
1947 	      argarray.release ();
1948 
1949 	      /* Copy all GIMPLE_CALL flags, location and block, except
1950 		 GF_CALL_VA_ARG_PACK.  */
1951 	      gimple_call_copy_flags (new_call, call_stmt);
1952 	      gimple_call_set_va_arg_pack (new_call, false);
1953 	      gimple_set_location (new_call, gimple_location (stmt));
1954 	      gimple_set_block (new_call, gimple_block (stmt));
1955 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1956 
1957 	      gsi_replace (&copy_gsi, new_call, false);
1958 	      stmt = new_call;
1959 	    }
1960 	  else if (call_stmt
1961 		   && id->call_stmt
1962 		   && (decl = gimple_call_fndecl (stmt))
1963 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1964 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
1965 		   && ! gimple_call_va_arg_pack_p (id->call_stmt))
1966 	    {
1967 	      /* __builtin_va_arg_pack_len () should be replaced by
1968 		 the number of anonymous arguments.  */
1969 	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
1970 	      tree count, p;
1971 	      gimple *new_stmt;
1972 
1973 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1974 		nargs--;
1975 
1976 	      /* For instrumented calls we should ignore bounds.  */
1977 	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
1978 		   i < gimple_call_num_args (id->call_stmt);
1979 		   i++)
1980 		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1981 		  nargs--;
1982 
1983 	      count = build_int_cst (integer_type_node, nargs);
1984 	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1985 	      gsi_replace (&copy_gsi, new_stmt, false);
1986 	      stmt = new_stmt;
1987 	    }
1988 	  else if (call_stmt
1989 		   && id->call_stmt
1990 		   && gimple_call_internal_p (stmt)
1991 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1992 	    {
1993 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
1994 	      gsi_remove (&copy_gsi, false);
1995 	      continue;
1996 	    }
1997 
1998 	  /* Statements produced by inlining can be unfolded, especially
1999 	     when we constant propagated some operands.  We can't fold
2000 	     them right now for two reasons:
2001 	     1) folding require SSA_NAME_DEF_STMTs to be correct
2002 	     2) we can't change function calls to builtins.
2003 	     So we just mark statement for later folding.  We mark
2004 	     all new statements, instead just statements that has changed
2005 	     by some nontrivial substitution so even statements made
2006 	     foldable indirectly are updated.  If this turns out to be
2007 	     expensive, copy_body can be told to watch for nontrivial
2008 	     changes.  */
2009 	  if (id->statements_to_fold)
2010 	    id->statements_to_fold->add (stmt);
2011 
2012 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2013 	     callgraph edges and update or duplicate them.  */
2014 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2015 	    {
2016 	      struct cgraph_edge *edge;
2017 
2018 	      switch (id->transform_call_graph_edges)
2019 		{
2020 		case CB_CGE_DUPLICATE:
2021 		  edge = id->src_node->get_edge (orig_stmt);
2022 		  if (edge)
2023 		    {
2024 		      struct cgraph_edge *old_edge = edge;
2025 		      profile_count old_cnt = edge->count;
2026 		      edge = edge->clone (id->dst_node, call_stmt,
2027 					  gimple_uid (stmt),
2028 					  num, den,
2029 					  true);
2030 
2031 		      /* Speculative calls consist of two edges - direct and
2032 			 indirect.  Duplicate the whole thing and distribute
2033 			 frequencies accordingly.  */
2034 		      if (edge->speculative)
2035 			{
2036 			  struct cgraph_edge *direct, *indirect;
2037 			  struct ipa_ref *ref;
2038 
2039 			  gcc_assert (!edge->indirect_unknown_callee);
2040 			  old_edge->speculative_call_info (direct, indirect, ref);
2041 
2042 			  profile_count indir_cnt = indirect->count;
2043 			  indirect = indirect->clone (id->dst_node, call_stmt,
2044 						      gimple_uid (stmt),
2045 						      num, den,
2046 						      true);
2047 
2048 			  profile_probability prob
2049 			     = indir_cnt.probability_in (old_cnt + indir_cnt);
2050 			  indirect->count
2051 			     = copy_basic_block->count.apply_probability (prob);
2052 			  edge->count = copy_basic_block->count - indirect->count;
2053 			  id->dst_node->clone_reference (ref, stmt);
2054 			}
2055 		      else
2056 			edge->count = copy_basic_block->count;
2057 		    }
2058 		  break;
2059 
2060 		case CB_CGE_MOVE_CLONES:
2061 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2062 								call_stmt);
2063 		  edge = id->dst_node->get_edge (stmt);
2064 		  break;
2065 
2066 		case CB_CGE_MOVE:
2067 		  edge = id->dst_node->get_edge (orig_stmt);
2068 		  if (edge)
2069 		    edge->set_call_stmt (call_stmt);
2070 		  break;
2071 
2072 		default:
2073 		  gcc_unreachable ();
2074 		}
2075 
2076 	      /* Constant propagation on argument done during inlining
2077 		 may create new direct call.  Produce an edge for it.  */
2078 	      if ((!edge
2079 		   || (edge->indirect_inlining_edge
2080 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2081 		  && id->dst_node->definition
2082 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2083 		{
2084 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2085 
2086 		  /* We have missing edge in the callgraph.  This can happen
2087 		     when previous inlining turned an indirect call into a
2088 		     direct call by constant propagating arguments or we are
2089 		     producing dead clone (for further cloning).  In all
2090 		     other cases we hit a bug (incorrect node sharing is the
2091 		     most common reason for missing edges).  */
2092 		  gcc_assert (!dest->definition
2093 			      || dest->address_taken
2094 		  	      || !id->src_node->definition
2095 			      || !id->dst_node->definition);
2096 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2097 		    id->dst_node->create_edge_including_clones
2098 		      (dest, orig_stmt, call_stmt, bb->count,
2099 		       CIF_ORIGINALLY_INDIRECT_CALL);
2100 		  else
2101 		    id->dst_node->create_edge (dest, call_stmt,
2102 					bb->count)->inline_failed
2103 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2104 		  if (dump_file)
2105 		    {
2106 		      fprintf (dump_file, "Created new direct edge to %s\n",
2107 			       dest->name ());
2108 		    }
2109 		}
2110 
2111 	      notice_special_calls (as_a <gcall *> (stmt));
2112 	    }
2113 
2114 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2115 				      id->eh_map, id->eh_lp_nr);
2116 
2117 	  gsi_next (&copy_gsi);
2118 	}
2119       while (!gsi_end_p (copy_gsi));
2120 
2121       copy_gsi = gsi_last_bb (copy_basic_block);
2122     }
2123 
2124   return copy_basic_block;
2125 }
2126 
2127 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2128    form is quite easy, since dominator relationship for old basic blocks does
2129    not change.
2130 
2131    There is however exception where inlining might change dominator relation
2132    across EH edges from basic block within inlined functions destinating
2133    to landing pads in function we inline into.
2134 
2135    The function fills in PHI_RESULTs of such PHI nodes if they refer
2136    to gimple regs.  Otherwise, the function mark PHI_RESULT of such
2137    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2138    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2139    set, and this means that there will be no overlapping live ranges
2140    for the underlying symbol.
2141 
2142    This might change in future if we allow redirecting of EH edges and
2143    we might want to change way build CFG pre-inlining to include
2144    all the possible edges then.  */
2145 static void
2146 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2147 				  bool can_throw, bool nonlocal_goto)
2148 {
2149   edge e;
2150   edge_iterator ei;
2151 
2152   FOR_EACH_EDGE (e, ei, bb->succs)
2153     if (!e->dest->aux
2154 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2155       {
2156 	gphi *phi;
2157 	gphi_iterator si;
2158 
2159 	if (!nonlocal_goto)
2160 	  gcc_assert (e->flags & EDGE_EH);
2161 
2162 	if (!can_throw)
2163 	  gcc_assert (!(e->flags & EDGE_EH));
2164 
2165 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2166 	  {
2167 	    edge re;
2168 
2169 	    phi = si.phi ();
2170 
2171 	    /* For abnormal goto/call edges the receiver can be the
2172 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2173 
2174 	    gcc_assert ((e->flags & EDGE_EH)
2175 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2176 
2177 	    re = find_edge (ret_bb, e->dest);
2178 	    gcc_checking_assert (re);
2179 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2180 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2181 
2182 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2183 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2184 	  }
2185       }
2186 }
2187 
2188 
2189 /* Copy edges from BB into its copy constructed earlier, scale profile
2190    accordingly.  Edges will be taken care of later.  Assume aux
2191    pointers to point to the copies of each BB.  Return true if any
2192    debug stmts are left after a statement that must end the basic block.  */
2193 
2194 static bool
2195 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2196 		   basic_block ret_bb, basic_block abnormal_goto_dest)
2197 {
2198   basic_block new_bb = (basic_block) bb->aux;
2199   edge_iterator ei;
2200   edge old_edge;
2201   gimple_stmt_iterator si;
2202   int flags;
2203   bool need_debug_cleanup = false;
2204 
2205   /* Use the indices from the original blocks to create edges for the
2206      new ones.  */
2207   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2208     if (!(old_edge->flags & EDGE_EH))
2209       {
2210 	edge new_edge;
2211 
2212 	flags = old_edge->flags;
2213 
2214 	/* Return edges do get a FALLTHRU flag when the get inlined.  */
2215 	if (old_edge->dest->index == EXIT_BLOCK
2216 	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2217 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2218 	  flags |= EDGE_FALLTHRU;
2219 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2220 	new_edge->probability = old_edge->probability;
2221       }
2222 
2223   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2224     return false;
2225 
2226   /* When doing function splitting, we must decreate count of the return block
2227      which was previously reachable by block we did not copy.  */
2228   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2229     FOR_EACH_EDGE (old_edge, ei, bb->preds)
2230       if (old_edge->src->index != ENTRY_BLOCK
2231 	  && !old_edge->src->aux)
2232 	new_bb->count -= old_edge->count ().apply_scale (num, den);
2233 
2234   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2235     {
2236       gimple *copy_stmt;
2237       bool can_throw, nonlocal_goto;
2238 
2239       copy_stmt = gsi_stmt (si);
2240       if (!is_gimple_debug (copy_stmt))
2241 	update_stmt (copy_stmt);
2242 
2243       /* Do this before the possible split_block.  */
2244       gsi_next (&si);
2245 
2246       /* If this tree could throw an exception, there are two
2247          cases where we need to add abnormal edge(s): the
2248          tree wasn't in a region and there is a "current
2249          region" in the caller; or the original tree had
2250          EH edges.  In both cases split the block after the tree,
2251          and add abnormal edge(s) as needed; we need both
2252          those from the callee and the caller.
2253          We check whether the copy can throw, because the const
2254          propagation can change an INDIRECT_REF which throws
2255          into a COMPONENT_REF which doesn't.  If the copy
2256          can throw, the original could also throw.  */
2257       can_throw = stmt_can_throw_internal (copy_stmt);
2258       nonlocal_goto
2259 	= (stmt_can_make_abnormal_goto (copy_stmt)
2260 	   && !computed_goto_p (copy_stmt));
2261 
2262       if (can_throw || nonlocal_goto)
2263 	{
2264 	  if (!gsi_end_p (si))
2265 	    {
2266 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2267 		gsi_next (&si);
2268 	      if (gsi_end_p (si))
2269 		need_debug_cleanup = true;
2270 	    }
2271 	  if (!gsi_end_p (si))
2272 	    /* Note that bb's predecessor edges aren't necessarily
2273 	       right at this point; split_block doesn't care.  */
2274 	    {
2275 	      edge e = split_block (new_bb, copy_stmt);
2276 
2277 	      new_bb = e->dest;
2278 	      new_bb->aux = e->src->aux;
2279 	      si = gsi_start_bb (new_bb);
2280 	    }
2281 	}
2282 
2283       bool update_probs = false;
2284 
2285       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2286 	{
2287 	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2288 	  update_probs = true;
2289 	}
2290       else if (can_throw)
2291 	{
2292 	  make_eh_edges (copy_stmt);
2293 	  update_probs = true;
2294 	}
2295 
2296       /* EH edges may not match old edges.  Copy as much as possible.  */
2297       if (update_probs)
2298 	{
2299           edge e;
2300           edge_iterator ei;
2301 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2302 
2303           FOR_EACH_EDGE (old_edge, ei, bb->succs)
2304             if ((old_edge->flags & EDGE_EH)
2305 		&& (e = find_edge (copy_stmt_bb,
2306 				   (basic_block) old_edge->dest->aux))
2307 		&& (e->flags & EDGE_EH))
2308 	      e->probability = old_edge->probability;
2309 
2310           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2311 	    if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
2312 	      e->probability = profile_probability::never ();
2313         }
2314 
2315 
2316       /* If the call we inline cannot make abnormal goto do not add
2317          additional abnormal edges but only retain those already present
2318 	 in the original function body.  */
2319       if (abnormal_goto_dest == NULL)
2320 	nonlocal_goto = false;
2321       if (nonlocal_goto)
2322 	{
2323 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2324 
2325 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2326 	    nonlocal_goto = false;
2327 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2328 	     in OpenMP regions which aren't allowed to be left abnormally.
2329 	     So, no need to add abnormal edge in that case.  */
2330 	  else if (is_gimple_call (copy_stmt)
2331 		   && gimple_call_internal_p (copy_stmt)
2332 		   && (gimple_call_internal_fn (copy_stmt)
2333 		       == IFN_ABNORMAL_DISPATCHER)
2334 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2335 	    nonlocal_goto = false;
2336 	  else
2337 	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2338 				   EDGE_ABNORMAL);
2339 	}
2340 
2341       if ((can_throw || nonlocal_goto)
2342 	  && gimple_in_ssa_p (cfun))
2343 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2344 					  can_throw, nonlocal_goto);
2345     }
2346   return need_debug_cleanup;
2347 }
2348 
2349 /* Copy the PHIs.  All blocks and edges are copied, some blocks
2350    was possibly split and new outgoing EH edges inserted.
2351    BB points to the block of original function and AUX pointers links
2352    the original and newly copied blocks.  */
2353 
2354 static void
2355 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2356 {
2357   basic_block const new_bb = (basic_block) bb->aux;
2358   edge_iterator ei;
2359   gphi *phi;
2360   gphi_iterator si;
2361   edge new_edge;
2362   bool inserted = false;
2363 
2364   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2365     {
2366       tree res, new_res;
2367       gphi *new_phi;
2368 
2369       phi = si.phi ();
2370       res = PHI_RESULT (phi);
2371       new_res = res;
2372       if (!virtual_operand_p (res))
2373 	{
2374 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2375 	  if (EDGE_COUNT (new_bb->preds) == 0)
2376 	    {
2377 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2378 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2379 	    }
2380 	  else
2381 	    {
2382 	      new_phi = create_phi_node (new_res, new_bb);
2383 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2384 		{
2385 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2386 					     bb);
2387 		  tree arg;
2388 		  tree new_arg;
2389 		  edge_iterator ei2;
2390 		  location_t locus;
2391 
2392 		  /* When doing partial cloning, we allow PHIs on the entry
2393 		     block as long as all the arguments are the same.
2394 		     Find any input edge to see argument to copy.  */
2395 		  if (!old_edge)
2396 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2397 		      if (!old_edge->src->aux)
2398 			break;
2399 
2400 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2401 		  new_arg = arg;
2402 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2403 		  gcc_assert (new_arg);
2404 		  /* With return slot optimization we can end up with
2405 		     non-gimple (foo *)&this->m, fix that here.  */
2406 		  if (TREE_CODE (new_arg) != SSA_NAME
2407 		      && TREE_CODE (new_arg) != FUNCTION_DECL
2408 		      && !is_gimple_val (new_arg))
2409 		    {
2410 		      gimple_seq stmts = NULL;
2411 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2412 						      NULL);
2413 		      gsi_insert_seq_on_edge (new_edge, stmts);
2414 		      inserted = true;
2415 		    }
2416 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2417 		  if (LOCATION_BLOCK (locus))
2418 		    {
2419 		      tree *n;
2420 		      n = id->decl_map->get (LOCATION_BLOCK (locus));
2421 		      gcc_assert (n);
2422 		      locus = set_block (locus, *n);
2423 		    }
2424 		  else
2425 		    locus = LOCATION_LOCUS (locus);
2426 
2427 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2428 		}
2429 	    }
2430 	}
2431     }
2432 
2433   /* Commit the delayed edge insertions.  */
2434   if (inserted)
2435     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2436       gsi_commit_one_edge_insert (new_edge, NULL);
2437 }
2438 
2439 
2440 /* Wrapper for remap_decl so it can be used as a callback.  */
2441 
2442 static tree
2443 remap_decl_1 (tree decl, void *data)
2444 {
2445   return remap_decl (decl, (copy_body_data *) data);
2446 }
2447 
2448 /* Build struct function and associated datastructures for the new clone
2449    NEW_FNDECL to be build.  CALLEE_FNDECL is the original.  Function changes
2450    the cfun to the function of new_fndecl (and current_function_decl too).  */
2451 
2452 static void
2453 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2454 {
2455   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2456 
2457   if (!DECL_ARGUMENTS (new_fndecl))
2458     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2459   if (!DECL_RESULT (new_fndecl))
2460     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2461 
2462   /* Register specific tree functions.  */
2463   gimple_register_cfg_hooks ();
2464 
2465   /* Get clean struct function.  */
2466   push_struct_function (new_fndecl);
2467 
2468   /* We will rebuild these, so just sanity check that they are empty.  */
2469   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2470   gcc_assert (cfun->local_decls == NULL);
2471   gcc_assert (cfun->cfg == NULL);
2472   gcc_assert (cfun->decl == new_fndecl);
2473 
2474   /* Copy items we preserve during cloning.  */
2475   cfun->static_chain_decl = src_cfun->static_chain_decl;
2476   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2477   cfun->function_end_locus = src_cfun->function_end_locus;
2478   cfun->curr_properties = src_cfun->curr_properties;
2479   cfun->last_verified = src_cfun->last_verified;
2480   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2481   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2482   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2483   cfun->stdarg = src_cfun->stdarg;
2484   cfun->after_inlining = src_cfun->after_inlining;
2485   cfun->can_throw_non_call_exceptions
2486     = src_cfun->can_throw_non_call_exceptions;
2487   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2488   cfun->returns_struct = src_cfun->returns_struct;
2489   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2490 
2491   init_empty_tree_cfg ();
2492 
2493   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2494 
2495   profile_count num = count;
2496   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2497   profile_count::adjust_for_ipa_scaling (&num, &den);
2498 
2499   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2500     ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2501 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2502   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2503     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2504 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2505   if (src_cfun->eh)
2506     init_eh_for_function ();
2507 
2508   if (src_cfun->gimple_df)
2509     {
2510       init_tree_ssa (cfun);
2511       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2512       if (cfun->gimple_df->in_ssa_p)
2513 	init_ssa_operands (cfun);
2514     }
2515 }
2516 
2517 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2518    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2519    successor has multiple predecessors, reset them, otherwise keep
2520    their value.  */
2521 
2522 static void
2523 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2524 {
2525   edge e;
2526   edge_iterator ei;
2527   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2528 
2529   if (gsi_end_p (si)
2530       || gsi_one_before_end_p (si)
2531       || !(stmt_can_throw_internal (gsi_stmt (si))
2532 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2533     return;
2534 
2535   FOR_EACH_EDGE (e, ei, new_bb->succs)
2536     {
2537       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2538       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2539       while (is_gimple_debug (gsi_stmt (ssi)))
2540 	{
2541 	  gimple *stmt = gsi_stmt (ssi);
2542 	  gdebug *new_stmt;
2543 	  tree var;
2544 	  tree value;
2545 
2546 	  /* For the last edge move the debug stmts instead of copying
2547 	     them.  */
2548 	  if (ei_one_before_end_p (ei))
2549 	    {
2550 	      si = ssi;
2551 	      gsi_prev (&ssi);
2552 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2553 		gimple_debug_bind_reset_value (stmt);
2554 	      gsi_remove (&si, false);
2555 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2556 	      continue;
2557 	    }
2558 
2559 	  if (gimple_debug_bind_p (stmt))
2560 	    {
2561 	      var = gimple_debug_bind_get_var (stmt);
2562 	      if (single_pred_p (e->dest))
2563 		{
2564 		  value = gimple_debug_bind_get_value (stmt);
2565 		  value = unshare_expr (value);
2566 		}
2567 	      else
2568 		value = NULL_TREE;
2569 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2570 	    }
2571 	  else if (gimple_debug_source_bind_p (stmt))
2572 	    {
2573 	      var = gimple_debug_source_bind_get_var (stmt);
2574 	      value = gimple_debug_source_bind_get_value (stmt);
2575 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2576 	    }
2577 	  else if (gimple_debug_nonbind_marker_p (stmt))
2578 	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2579 	  else
2580 	    gcc_unreachable ();
2581 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2582 	  id->debug_stmts.safe_push (new_stmt);
2583 	  gsi_prev (&ssi);
2584 	}
2585     }
2586 }
2587 
2588 /* Make a copy of the sub-loops of SRC_PARENT and place them
2589    as siblings of DEST_PARENT.  */
2590 
2591 static void
2592 copy_loops (copy_body_data *id,
2593 	    struct loop *dest_parent, struct loop *src_parent)
2594 {
2595   struct loop *src_loop = src_parent->inner;
2596   while (src_loop)
2597     {
2598       if (!id->blocks_to_copy
2599 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2600 	{
2601 	  struct loop *dest_loop = alloc_loop ();
2602 
2603 	  /* Assign the new loop its header and latch and associate
2604 	     those with the new loop.  */
2605 	  dest_loop->header = (basic_block)src_loop->header->aux;
2606 	  dest_loop->header->loop_father = dest_loop;
2607 	  if (src_loop->latch != NULL)
2608 	    {
2609 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2610 	      dest_loop->latch->loop_father = dest_loop;
2611 	    }
2612 
2613 	  /* Copy loop meta-data.  */
2614 	  copy_loop_info (src_loop, dest_loop);
2615 
2616 	  /* Finally place it into the loop array and the loop tree.  */
2617 	  place_new_loop (cfun, dest_loop);
2618 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2619 
2620 	  dest_loop->safelen = src_loop->safelen;
2621 	  if (src_loop->unroll)
2622 	    {
2623 	      dest_loop->unroll = src_loop->unroll;
2624 	      cfun->has_unroll = true;
2625 	    }
2626 	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
2627 	  if (src_loop->force_vectorize)
2628 	    {
2629 	      dest_loop->force_vectorize = true;
2630 	      cfun->has_force_vectorize_loops = true;
2631 	    }
2632 	  if (src_loop->simduid)
2633 	    {
2634 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2635 	      cfun->has_simduid_loops = true;
2636 	    }
2637 
2638 	  /* Recurse.  */
2639 	  copy_loops (id, dest_loop, src_loop);
2640 	}
2641       src_loop = src_loop->next;
2642     }
2643 }
2644 
2645 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2646 
2647 void
2648 redirect_all_calls (copy_body_data * id, basic_block bb)
2649 {
2650   gimple_stmt_iterator si;
2651   gimple *last = last_stmt (bb);
2652   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2653     {
2654       gimple *stmt = gsi_stmt (si);
2655       if (is_gimple_call (stmt))
2656 	{
2657 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2658 	  if (edge)
2659 	    {
2660 	      edge->redirect_call_stmt_to_callee ();
2661 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2662 		gimple_purge_dead_eh_edges (bb);
2663 	    }
2664 	}
2665     }
2666 }
2667 
2668 /* Make a copy of the body of FN so that it can be inserted inline in
2669    another function.  Walks FN via CFG, returns new fndecl.  */
2670 
2671 static tree
2672 copy_cfg_body (copy_body_data * id,
2673 	       basic_block entry_block_map, basic_block exit_block_map,
2674 	       basic_block new_entry)
2675 {
2676   tree callee_fndecl = id->src_fn;
2677   /* Original cfun for the callee, doesn't change.  */
2678   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2679   struct function *cfun_to_copy;
2680   basic_block bb;
2681   tree new_fndecl = NULL;
2682   bool need_debug_cleanup = false;
2683   int last;
2684   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2685   profile_count num = entry_block_map->count;
2686 
2687   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2688 
2689   /* Register specific tree functions.  */
2690   gimple_register_cfg_hooks ();
2691 
2692   /* If we are inlining just region of the function, make sure to connect
2693      new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since new entry can be
2694      part of loop, we must compute frequency and probability of
2695      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2696      probabilities of edges incoming from nonduplicated region.  */
2697   if (new_entry)
2698     {
2699       edge e;
2700       edge_iterator ei;
2701       den = profile_count::zero ();
2702 
2703       FOR_EACH_EDGE (e, ei, new_entry->preds)
2704 	if (!e->src->aux)
2705 	  den += e->count ();
2706       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2707     }
2708 
2709   profile_count::adjust_for_ipa_scaling (&num, &den);
2710 
2711   /* Must have a CFG here at this point.  */
2712   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2713 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2714 
2715 
2716   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2717   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2718   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2719   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2720 
2721   /* Duplicate any exception-handling regions.  */
2722   if (cfun->eh)
2723     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2724 				       remap_decl_1, id);
2725 
2726   /* Use aux pointers to map the original blocks to copy.  */
2727   FOR_EACH_BB_FN (bb, cfun_to_copy)
2728     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2729       {
2730 	basic_block new_bb = copy_bb (id, bb, num, den);
2731 	bb->aux = new_bb;
2732 	new_bb->aux = bb;
2733 	new_bb->loop_father = entry_block_map->loop_father;
2734       }
2735 
2736   last = last_basic_block_for_fn (cfun);
2737 
2738   /* Now that we've duplicated the blocks, duplicate their edges.  */
2739   basic_block abnormal_goto_dest = NULL;
2740   if (id->call_stmt
2741       && stmt_can_make_abnormal_goto (id->call_stmt))
2742     {
2743       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2744 
2745       bb = gimple_bb (id->call_stmt);
2746       gsi_next (&gsi);
2747       if (gsi_end_p (gsi))
2748 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2749     }
2750   FOR_ALL_BB_FN (bb, cfun_to_copy)
2751     if (!id->blocks_to_copy
2752 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2753       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2754 					       abnormal_goto_dest);
2755 
2756   if (new_entry)
2757     {
2758       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2759 			  EDGE_FALLTHRU);
2760       e->probability = profile_probability::always ();
2761     }
2762 
2763   /* Duplicate the loop tree, if available and wanted.  */
2764   if (loops_for_fn (src_cfun) != NULL
2765       && current_loops != NULL)
2766     {
2767       copy_loops (id, entry_block_map->loop_father,
2768 		  get_loop (src_cfun, 0));
2769       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2770       loops_state_set (LOOPS_NEED_FIXUP);
2771     }
2772 
2773   /* If the loop tree in the source function needed fixup, mark the
2774      destination loop tree for fixup, too.  */
2775   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2776     loops_state_set (LOOPS_NEED_FIXUP);
2777 
2778   if (gimple_in_ssa_p (cfun))
2779     FOR_ALL_BB_FN (bb, cfun_to_copy)
2780       if (!id->blocks_to_copy
2781 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2782 	copy_phis_for_bb (bb, id);
2783 
2784   FOR_ALL_BB_FN (bb, cfun_to_copy)
2785     if (bb->aux)
2786       {
2787 	if (need_debug_cleanup
2788 	    && bb->index != ENTRY_BLOCK
2789 	    && bb->index != EXIT_BLOCK)
2790 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2791 	/* Update call edge destinations.  This can not be done before loop
2792 	   info is updated, because we may split basic blocks.  */
2793 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2794 	    && bb->index != ENTRY_BLOCK
2795 	    && bb->index != EXIT_BLOCK)
2796 	  redirect_all_calls (id, (basic_block)bb->aux);
2797 	((basic_block)bb->aux)->aux = NULL;
2798 	bb->aux = NULL;
2799       }
2800 
2801   /* Zero out AUX fields of newly created block during EH edge
2802      insertion. */
2803   for (; last < last_basic_block_for_fn (cfun); last++)
2804     {
2805       if (need_debug_cleanup)
2806 	maybe_move_debug_stmts_to_successors (id,
2807 					      BASIC_BLOCK_FOR_FN (cfun, last));
2808       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2809       /* Update call edge destinations.  This can not be done before loop
2810 	 info is updated, because we may split basic blocks.  */
2811       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2812 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2813     }
2814   entry_block_map->aux = NULL;
2815   exit_block_map->aux = NULL;
2816 
2817   if (id->eh_map)
2818     {
2819       delete id->eh_map;
2820       id->eh_map = NULL;
2821     }
2822   if (id->dependence_map)
2823     {
2824       delete id->dependence_map;
2825       id->dependence_map = NULL;
2826     }
2827 
2828   return new_fndecl;
2829 }
2830 
2831 /* Copy the debug STMT using ID.  We deal with these statements in a
2832    special way: if any variable in their VALUE expression wasn't
2833    remapped yet, we won't remap it, because that would get decl uids
2834    out of sync, causing codegen differences between -g and -g0.  If
2835    this arises, we drop the VALUE expression altogether.  */
2836 
2837 static void
2838 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2839 {
2840   tree t, *n;
2841   struct walk_stmt_info wi;
2842 
2843   if (gimple_block (stmt))
2844     {
2845       n = id->decl_map->get (gimple_block (stmt));
2846       gimple_set_block (stmt, n ? *n : id->block);
2847     }
2848 
2849   if (gimple_debug_nonbind_marker_p (stmt))
2850     return;
2851 
2852   /* Remap all the operands in COPY.  */
2853   memset (&wi, 0, sizeof (wi));
2854   wi.info = id;
2855 
2856   processing_debug_stmt = 1;
2857 
2858   if (gimple_debug_source_bind_p (stmt))
2859     t = gimple_debug_source_bind_get_var (stmt);
2860   else if (gimple_debug_bind_p (stmt))
2861     t = gimple_debug_bind_get_var (stmt);
2862   else
2863     gcc_unreachable ();
2864 
2865   if (TREE_CODE (t) == PARM_DECL && id->debug_map
2866       && (n = id->debug_map->get (t)))
2867     {
2868       gcc_assert (VAR_P (*n));
2869       t = *n;
2870     }
2871   else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2872     /* T is a non-localized variable.  */;
2873   else
2874     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2875 
2876   if (gimple_debug_bind_p (stmt))
2877     {
2878       gimple_debug_bind_set_var (stmt, t);
2879 
2880       if (gimple_debug_bind_has_value_p (stmt))
2881 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2882 		   remap_gimple_op_r, &wi, NULL);
2883 
2884       /* Punt if any decl couldn't be remapped.  */
2885       if (processing_debug_stmt < 0)
2886 	gimple_debug_bind_reset_value (stmt);
2887     }
2888   else if (gimple_debug_source_bind_p (stmt))
2889     {
2890       gimple_debug_source_bind_set_var (stmt, t);
2891       /* When inlining and source bind refers to one of the optimized
2892 	 away parameters, change the source bind into normal debug bind
2893 	 referring to the corresponding DEBUG_EXPR_DECL that should have
2894 	 been bound before the call stmt.  */
2895       t = gimple_debug_source_bind_get_value (stmt);
2896       if (t != NULL_TREE
2897 	  && TREE_CODE (t) == PARM_DECL
2898 	  && id->call_stmt)
2899 	{
2900 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2901 	  unsigned int i;
2902 	  if (debug_args != NULL)
2903 	    {
2904 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2905 		if ((**debug_args)[i] == DECL_ORIGIN (t)
2906 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2907 		  {
2908 		    t = (**debug_args)[i + 1];
2909 		    stmt->subcode = GIMPLE_DEBUG_BIND;
2910 		    gimple_debug_bind_set_value (stmt, t);
2911 		    break;
2912 		  }
2913 	    }
2914 	}
2915       if (gimple_debug_source_bind_p (stmt))
2916 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2917 		   remap_gimple_op_r, &wi, NULL);
2918     }
2919 
2920   processing_debug_stmt = 0;
2921 
2922   update_stmt (stmt);
2923 }
2924 
2925 /* Process deferred debug stmts.  In order to give values better odds
2926    of being successfully remapped, we delay the processing of debug
2927    stmts until all other stmts that might require remapping are
2928    processed.  */
2929 
2930 static void
2931 copy_debug_stmts (copy_body_data *id)
2932 {
2933   size_t i;
2934   gdebug *stmt;
2935 
2936   if (!id->debug_stmts.exists ())
2937     return;
2938 
2939   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2940     copy_debug_stmt (stmt, id);
2941 
2942   id->debug_stmts.release ();
2943 }
2944 
2945 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2946    another function.  */
2947 
2948 static tree
2949 copy_tree_body (copy_body_data *id)
2950 {
2951   tree fndecl = id->src_fn;
2952   tree body = DECL_SAVED_TREE (fndecl);
2953 
2954   walk_tree (&body, copy_tree_body_r, id, NULL);
2955 
2956   return body;
2957 }
2958 
2959 /* Make a copy of the body of FN so that it can be inserted inline in
2960    another function.  */
2961 
2962 static tree
2963 copy_body (copy_body_data *id,
2964 	   basic_block entry_block_map, basic_block exit_block_map,
2965 	   basic_block new_entry)
2966 {
2967   tree fndecl = id->src_fn;
2968   tree body;
2969 
2970   /* If this body has a CFG, walk CFG and copy.  */
2971   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2972   body = copy_cfg_body (id, entry_block_map, exit_block_map,
2973 			new_entry);
2974   copy_debug_stmts (id);
2975 
2976   return body;
2977 }
2978 
2979 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2980    defined in function FN, or of a data member thereof.  */
2981 
2982 static bool
2983 self_inlining_addr_expr (tree value, tree fn)
2984 {
2985   tree var;
2986 
2987   if (TREE_CODE (value) != ADDR_EXPR)
2988     return false;
2989 
2990   var = get_base_address (TREE_OPERAND (value, 0));
2991 
2992   return var && auto_var_in_fn_p (var, fn);
2993 }
2994 
2995 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2996    lexical block and line number information from base_stmt, if given,
2997    or from the last stmt of the block otherwise.  */
2998 
2999 static gimple *
3000 insert_init_debug_bind (copy_body_data *id,
3001 			basic_block bb, tree var, tree value,
3002 			gimple *base_stmt)
3003 {
3004   gimple *note;
3005   gimple_stmt_iterator gsi;
3006   tree tracked_var;
3007 
3008   if (!gimple_in_ssa_p (id->src_cfun))
3009     return NULL;
3010 
3011   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3012     return NULL;
3013 
3014   tracked_var = target_for_debug_bind (var);
3015   if (!tracked_var)
3016     return NULL;
3017 
3018   if (bb)
3019     {
3020       gsi = gsi_last_bb (bb);
3021       if (!base_stmt && !gsi_end_p (gsi))
3022 	base_stmt = gsi_stmt (gsi);
3023     }
3024 
3025   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3026 
3027   if (bb)
3028     {
3029       if (!gsi_end_p (gsi))
3030 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3031       else
3032 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3033     }
3034 
3035   return note;
3036 }
3037 
3038 static void
3039 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3040 {
3041   /* If VAR represents a zero-sized variable, it's possible that the
3042      assignment statement may result in no gimple statements.  */
3043   if (init_stmt)
3044     {
3045       gimple_stmt_iterator si = gsi_last_bb (bb);
3046 
3047       /* We can end up with init statements that store to a non-register
3048          from a rhs with a conversion.  Handle that here by forcing the
3049 	 rhs into a temporary.  gimple_regimplify_operands is not
3050 	 prepared to do this for us.  */
3051       if (!is_gimple_debug (init_stmt)
3052 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3053 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3054 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3055 	{
3056 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3057 			     gimple_expr_type (init_stmt),
3058 			     gimple_assign_rhs1 (init_stmt));
3059 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3060 					  GSI_NEW_STMT);
3061 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3062 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3063 	}
3064       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3065       gimple_regimplify_operands (init_stmt, &si);
3066 
3067       if (!is_gimple_debug (init_stmt))
3068 	{
3069 	  tree def = gimple_assign_lhs (init_stmt);
3070 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3071 	}
3072     }
3073 }
3074 
3075 /* Initialize parameter P with VALUE.  If needed, produce init statement
3076    at the end of BB.  When BB is NULL, we return init statement to be
3077    output later.  */
3078 static gimple *
3079 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3080 		     basic_block bb, tree *vars)
3081 {
3082   gimple *init_stmt = NULL;
3083   tree var;
3084   tree rhs = value;
3085   tree def = (gimple_in_ssa_p (cfun)
3086 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3087 
3088   if (value
3089       && value != error_mark_node
3090       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3091     {
3092       /* If we can match up types by promotion/demotion do so.  */
3093       if (fold_convertible_p (TREE_TYPE (p), value))
3094 	rhs = fold_convert (TREE_TYPE (p), value);
3095       else
3096 	{
3097 	  /* ???  For valid programs we should not end up here.
3098 	     Still if we end up with truly mismatched types here, fall back
3099 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3100 	     GIMPLE to the following passes.  */
3101 	  if (!is_gimple_reg_type (TREE_TYPE (value))
3102 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3103 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3104 	  else
3105 	    rhs = build_zero_cst (TREE_TYPE (p));
3106 	}
3107     }
3108 
3109   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3110      here since the type of this decl must be visible to the calling
3111      function.  */
3112   var = copy_decl_to_var (p, id);
3113 
3114   /* Declare this new variable.  */
3115   DECL_CHAIN (var) = *vars;
3116   *vars = var;
3117 
3118   /* Make gimplifier happy about this variable.  */
3119   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3120 
3121   /* If the parameter is never assigned to, has no SSA_NAMEs created,
3122      we would not need to create a new variable here at all, if it
3123      weren't for debug info.  Still, we can just use the argument
3124      value.  */
3125   if (TREE_READONLY (p)
3126       && !TREE_ADDRESSABLE (p)
3127       && value && !TREE_SIDE_EFFECTS (value)
3128       && !def)
3129     {
3130       /* We may produce non-gimple trees by adding NOPs or introduce
3131 	 invalid sharing when operand is not really constant.
3132 	 It is not big deal to prohibit constant propagation here as
3133 	 we will constant propagate in DOM1 pass anyway.  */
3134       if (is_gimple_min_invariant (value)
3135 	  && useless_type_conversion_p (TREE_TYPE (p),
3136 						 TREE_TYPE (value))
3137 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3138 	     the base variable isn't a local variable of the inlined
3139 	     function, e.g., when doing recursive inlining, direct or
3140 	     mutually-recursive or whatever, which is why we don't
3141 	     just test whether fn == current_function_decl.  */
3142 	  && ! self_inlining_addr_expr (value, fn))
3143 	{
3144 	  insert_decl_map (id, p, value);
3145 	  insert_debug_decl_map (id, p, var);
3146 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3147 	}
3148     }
3149 
3150   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3151      that way, when the PARM_DECL is encountered, it will be
3152      automatically replaced by the VAR_DECL.  */
3153   insert_decl_map (id, p, var);
3154 
3155   /* Even if P was TREE_READONLY, the new VAR should not be.
3156      In the original code, we would have constructed a
3157      temporary, and then the function body would have never
3158      changed the value of P.  However, now, we will be
3159      constructing VAR directly.  The constructor body may
3160      change its value multiple times as it is being
3161      constructed.  Therefore, it must not be TREE_READONLY;
3162      the back-end assumes that TREE_READONLY variable is
3163      assigned to only once.  */
3164   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3165     TREE_READONLY (var) = 0;
3166 
3167   /* If there is no setup required and we are in SSA, take the easy route
3168      replacing all SSA names representing the function parameter by the
3169      SSA name passed to function.
3170 
3171      We need to construct map for the variable anyway as it might be used
3172      in different SSA names when parameter is set in function.
3173 
3174      Do replacement at -O0 for const arguments replaced by constant.
3175      This is important for builtin_constant_p and other construct requiring
3176      constant argument to be visible in inlined function body.  */
3177   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3178       && (optimize
3179           || (TREE_READONLY (p)
3180 	      && is_gimple_min_invariant (rhs)))
3181       && (TREE_CODE (rhs) == SSA_NAME
3182 	  || is_gimple_min_invariant (rhs))
3183       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3184     {
3185       insert_decl_map (id, def, rhs);
3186       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3187     }
3188 
3189   /* If the value of argument is never used, don't care about initializing
3190      it.  */
3191   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3192     {
3193       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3194       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3195     }
3196 
3197   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3198      the argument to the proper type in case it was promoted.  */
3199   if (value)
3200     {
3201       if (rhs == error_mark_node)
3202 	{
3203 	  insert_decl_map (id, p, var);
3204 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3205 	}
3206 
3207       STRIP_USELESS_TYPE_CONVERSION (rhs);
3208 
3209       /* If we are in SSA form properly remap the default definition
3210          or assign to a dummy SSA name if the parameter is unused and
3211 	 we are not optimizing.  */
3212       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3213 	{
3214 	  if (def)
3215 	    {
3216 	      def = remap_ssa_name (def, id);
3217 	      init_stmt = gimple_build_assign (def, rhs);
3218 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3219 	      set_ssa_default_def (cfun, var, NULL);
3220 	    }
3221 	  else if (!optimize)
3222 	    {
3223 	      def = make_ssa_name (var);
3224 	      init_stmt = gimple_build_assign (def, rhs);
3225 	    }
3226 	}
3227       else
3228         init_stmt = gimple_build_assign (var, rhs);
3229 
3230       if (bb && init_stmt)
3231         insert_init_stmt (id, bb, init_stmt);
3232     }
3233   return init_stmt;
3234 }
3235 
3236 /* Generate code to initialize the parameters of the function at the
3237    top of the stack in ID from the GIMPLE_CALL STMT.  */
3238 
3239 static void
3240 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3241 			       tree fn, basic_block bb)
3242 {
3243   tree parms;
3244   size_t i;
3245   tree p;
3246   tree vars = NULL_TREE;
3247   tree static_chain = gimple_call_chain (stmt);
3248 
3249   /* Figure out what the parameters are.  */
3250   parms = DECL_ARGUMENTS (fn);
3251 
3252   /* Loop through the parameter declarations, replacing each with an
3253      equivalent VAR_DECL, appropriately initialized.  */
3254   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3255     {
3256       tree val;
3257       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3258       setup_one_parameter (id, p, val, fn, bb, &vars);
3259     }
3260   /* After remapping parameters remap their types.  This has to be done
3261      in a second loop over all parameters to appropriately remap
3262      variable sized arrays when the size is specified in a
3263      parameter following the array.  */
3264   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3265     {
3266       tree *varp = id->decl_map->get (p);
3267       if (varp && VAR_P (*varp))
3268 	{
3269 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3270 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3271 	  tree var = *varp;
3272 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3273 	  /* Also remap the default definition if it was remapped
3274 	     to the default definition of the parameter replacement
3275 	     by the parameter setup.  */
3276 	  if (def)
3277 	    {
3278 	      tree *defp = id->decl_map->get (def);
3279 	      if (defp
3280 		  && TREE_CODE (*defp) == SSA_NAME
3281 		  && SSA_NAME_VAR (*defp) == var)
3282 		TREE_TYPE (*defp) = TREE_TYPE (var);
3283 	    }
3284 	}
3285     }
3286 
3287   /* Initialize the static chain.  */
3288   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3289   gcc_assert (fn != current_function_decl);
3290   if (p)
3291     {
3292       /* No static chain?  Seems like a bug in tree-nested.c.  */
3293       gcc_assert (static_chain);
3294 
3295       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3296     }
3297 
3298   declare_inline_vars (id->block, vars);
3299 }
3300 
3301 
3302 /* Declare a return variable to replace the RESULT_DECL for the
3303    function we are calling.  An appropriate DECL_STMT is returned.
3304    The USE_STMT is filled to contain a use of the declaration to
3305    indicate the return value of the function.
3306 
3307    RETURN_SLOT, if non-null is place where to store the result.  It
3308    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3309    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3310 
3311    RETURN_BOUNDS holds a destination for returned bounds.
3312 
3313    The return value is a (possibly null) value that holds the result
3314    as seen by the caller.  */
3315 
3316 static tree
3317 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3318 			 tree return_bounds, basic_block entry_bb)
3319 {
3320   tree callee = id->src_fn;
3321   tree result = DECL_RESULT (callee);
3322   tree callee_type = TREE_TYPE (result);
3323   tree caller_type;
3324   tree var, use;
3325 
3326   /* Handle type-mismatches in the function declaration return type
3327      vs. the call expression.  */
3328   if (modify_dest)
3329     caller_type = TREE_TYPE (modify_dest);
3330   else
3331     caller_type = TREE_TYPE (TREE_TYPE (callee));
3332 
3333   /* We don't need to do anything for functions that don't return anything.  */
3334   if (VOID_TYPE_P (callee_type))
3335     return NULL_TREE;
3336 
3337   /* If there was a return slot, then the return value is the
3338      dereferenced address of that object.  */
3339   if (return_slot)
3340     {
3341       /* The front end shouldn't have used both return_slot and
3342 	 a modify expression.  */
3343       gcc_assert (!modify_dest);
3344       if (DECL_BY_REFERENCE (result))
3345 	{
3346 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3347 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3348 
3349 	  /* We are going to construct *&return_slot and we can't do that
3350 	     for variables believed to be not addressable.
3351 
3352 	     FIXME: This check possibly can match, because values returned
3353 	     via return slot optimization are not believed to have address
3354 	     taken by alias analysis.  */
3355 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3356 	  var = return_slot_addr;
3357 	}
3358       else
3359 	{
3360 	  var = return_slot;
3361 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3362 	  if (TREE_ADDRESSABLE (result))
3363 	    mark_addressable (var);
3364 	}
3365       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3366            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3367 	  && !DECL_GIMPLE_REG_P (result)
3368 	  && DECL_P (var))
3369 	DECL_GIMPLE_REG_P (var) = 0;
3370       use = NULL;
3371       goto done;
3372     }
3373 
3374   /* All types requiring non-trivial constructors should have been handled.  */
3375   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3376 
3377   /* Attempt to avoid creating a new temporary variable.  */
3378   if (modify_dest
3379       && TREE_CODE (modify_dest) != SSA_NAME)
3380     {
3381       bool use_it = false;
3382 
3383       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3384       if (!useless_type_conversion_p (callee_type, caller_type))
3385 	use_it = false;
3386 
3387       /* ??? If we're assigning to a variable sized type, then we must
3388 	 reuse the destination variable, because we've no good way to
3389 	 create variable sized temporaries at this point.  */
3390       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3391 	use_it = true;
3392 
3393       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3394 	 reuse it as the result of the call directly.  Don't do this if
3395 	 it would promote MODIFY_DEST to addressable.  */
3396       else if (TREE_ADDRESSABLE (result))
3397 	use_it = false;
3398       else
3399 	{
3400 	  tree base_m = get_base_address (modify_dest);
3401 
3402 	  /* If the base isn't a decl, then it's a pointer, and we don't
3403 	     know where that's going to go.  */
3404 	  if (!DECL_P (base_m))
3405 	    use_it = false;
3406 	  else if (is_global_var (base_m))
3407 	    use_it = false;
3408 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3409 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3410 		   && !DECL_GIMPLE_REG_P (result)
3411 		   && DECL_GIMPLE_REG_P (base_m))
3412 	    use_it = false;
3413 	  else if (!TREE_ADDRESSABLE (base_m))
3414 	    use_it = true;
3415 	}
3416 
3417       if (use_it)
3418 	{
3419 	  var = modify_dest;
3420 	  use = NULL;
3421 	  goto done;
3422 	}
3423     }
3424 
3425   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3426 
3427   var = copy_result_decl_to_var (result, id);
3428   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3429 
3430   /* Do not have the rest of GCC warn about this variable as it should
3431      not be visible to the user.  */
3432   TREE_NO_WARNING (var) = 1;
3433 
3434   declare_inline_vars (id->block, var);
3435 
3436   /* Build the use expr.  If the return type of the function was
3437      promoted, convert it back to the expected type.  */
3438   use = var;
3439   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3440     {
3441       /* If we can match up types by promotion/demotion do so.  */
3442       if (fold_convertible_p (caller_type, var))
3443 	use = fold_convert (caller_type, var);
3444       else
3445 	{
3446 	  /* ???  For valid programs we should not end up here.
3447 	     Still if we end up with truly mismatched types here, fall back
3448 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3449 	     passes.  */
3450 	  /* Prevent var from being written into SSA form.  */
3451 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3452 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3453 	    DECL_GIMPLE_REG_P (var) = false;
3454 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3455 	    TREE_ADDRESSABLE (var) = true;
3456 	  use = fold_build2 (MEM_REF, caller_type,
3457 			     build_fold_addr_expr (var),
3458 			     build_int_cst (ptr_type_node, 0));
3459 	}
3460     }
3461 
3462   STRIP_USELESS_TYPE_CONVERSION (use);
3463 
3464   if (DECL_BY_REFERENCE (result))
3465     {
3466       TREE_ADDRESSABLE (var) = 1;
3467       var = build_fold_addr_expr (var);
3468     }
3469 
3470  done:
3471   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3472      way, when the RESULT_DECL is encountered, it will be
3473      automatically replaced by the VAR_DECL.
3474 
3475      When returning by reference, ensure that RESULT_DECL remaps to
3476      gimple_val.  */
3477   if (DECL_BY_REFERENCE (result)
3478       && !is_gimple_val (var))
3479     {
3480       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3481       insert_decl_map (id, result, temp);
3482       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3483 	 it's default_def SSA_NAME.  */
3484       if (gimple_in_ssa_p (id->src_cfun)
3485 	  && is_gimple_reg (result))
3486 	{
3487 	  temp = make_ssa_name (temp);
3488 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3489 	}
3490       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3491     }
3492   else
3493     insert_decl_map (id, result, var);
3494 
3495   /* Remember this so we can ignore it in remap_decls.  */
3496   id->retvar = var;
3497 
3498   /* If returned bounds are used, then make var for them.  */
3499   if (return_bounds)
3500   {
3501     tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3502     DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3503     TREE_NO_WARNING (bndtemp) = 1;
3504     declare_inline_vars (id->block, bndtemp);
3505 
3506     id->retbnd = bndtemp;
3507     insert_init_stmt (id, entry_bb,
3508 		      gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3509   }
3510 
3511   return use;
3512 }
3513 
3514 /* Determine if the function can be copied.  If so return NULL.  If
3515    not return a string describng the reason for failure.  */
3516 
3517 const char *
3518 copy_forbidden (struct function *fun)
3519 {
3520   const char *reason = fun->cannot_be_copied_reason;
3521 
3522   /* Only examine the function once.  */
3523   if (fun->cannot_be_copied_set)
3524     return reason;
3525 
3526   /* We cannot copy a function that receives a non-local goto
3527      because we cannot remap the destination label used in the
3528      function that is performing the non-local goto.  */
3529   /* ??? Actually, this should be possible, if we work at it.
3530      No doubt there's just a handful of places that simply
3531      assume it doesn't happen and don't substitute properly.  */
3532   if (fun->has_nonlocal_label)
3533     {
3534       reason = G_("function %q+F can never be copied "
3535 		  "because it receives a non-local goto");
3536       goto fail;
3537     }
3538 
3539   if (fun->has_forced_label_in_static)
3540     {
3541       reason = G_("function %q+F can never be copied because it saves "
3542 		  "address of local label in a static variable");
3543       goto fail;
3544     }
3545 
3546  fail:
3547   fun->cannot_be_copied_reason = reason;
3548   fun->cannot_be_copied_set = true;
3549   return reason;
3550 }
3551 
3552 
3553 static const char *inline_forbidden_reason;
3554 
3555 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3556    iff a function can not be inlined.  Also sets the reason why. */
3557 
3558 static tree
3559 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3560 			 struct walk_stmt_info *wip)
3561 {
3562   tree fn = (tree) wip->info;
3563   tree t;
3564   gimple *stmt = gsi_stmt (*gsi);
3565 
3566   switch (gimple_code (stmt))
3567     {
3568     case GIMPLE_CALL:
3569       /* Refuse to inline alloca call unless user explicitly forced so as
3570 	 this may change program's memory overhead drastically when the
3571 	 function using alloca is called in loop.  In GCC present in
3572 	 SPEC2000 inlining into schedule_block cause it to require 2GB of
3573 	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3574 	 VLA objects as those can't cause unbounded growth (they're always
3575 	 wrapped inside stack_save/stack_restore regions.  */
3576       if (gimple_maybe_alloca_call_p (stmt)
3577 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3578 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3579 	{
3580 	  inline_forbidden_reason
3581 	    = G_("function %q+F can never be inlined because it uses "
3582 		 "alloca (override using the always_inline attribute)");
3583 	  *handled_ops_p = true;
3584 	  return fn;
3585 	}
3586 
3587       t = gimple_call_fndecl (stmt);
3588       if (t == NULL_TREE)
3589 	break;
3590 
3591       /* We cannot inline functions that call setjmp.  */
3592       if (setjmp_call_p (t))
3593 	{
3594 	  inline_forbidden_reason
3595 	    = G_("function %q+F can never be inlined because it uses setjmp");
3596 	  *handled_ops_p = true;
3597 	  return t;
3598 	}
3599 
3600       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3601 	switch (DECL_FUNCTION_CODE (t))
3602 	  {
3603 	    /* We cannot inline functions that take a variable number of
3604 	       arguments.  */
3605 	  case BUILT_IN_VA_START:
3606 	  case BUILT_IN_NEXT_ARG:
3607 	  case BUILT_IN_VA_END:
3608 	    inline_forbidden_reason
3609 	      = G_("function %q+F can never be inlined because it "
3610 		   "uses variable argument lists");
3611 	    *handled_ops_p = true;
3612 	    return t;
3613 
3614 	  case BUILT_IN_LONGJMP:
3615 	    /* We can't inline functions that call __builtin_longjmp at
3616 	       all.  The non-local goto machinery really requires the
3617 	       destination be in a different function.  If we allow the
3618 	       function calling __builtin_longjmp to be inlined into the
3619 	       function calling __builtin_setjmp, Things will Go Awry.  */
3620 	    inline_forbidden_reason
3621 	      = G_("function %q+F can never be inlined because "
3622 		   "it uses setjmp-longjmp exception handling");
3623 	    *handled_ops_p = true;
3624 	    return t;
3625 
3626 	  case BUILT_IN_NONLOCAL_GOTO:
3627 	    /* Similarly.  */
3628 	    inline_forbidden_reason
3629 	      = G_("function %q+F can never be inlined because "
3630 		   "it uses non-local goto");
3631 	    *handled_ops_p = true;
3632 	    return t;
3633 
3634 	  case BUILT_IN_RETURN:
3635 	  case BUILT_IN_APPLY_ARGS:
3636 	    /* If a __builtin_apply_args caller would be inlined,
3637 	       it would be saving arguments of the function it has
3638 	       been inlined into.  Similarly __builtin_return would
3639 	       return from the function the inline has been inlined into.  */
3640 	    inline_forbidden_reason
3641 	      = G_("function %q+F can never be inlined because "
3642 		   "it uses __builtin_return or __builtin_apply_args");
3643 	    *handled_ops_p = true;
3644 	    return t;
3645 
3646 	  default:
3647 	    break;
3648 	  }
3649       break;
3650 
3651     case GIMPLE_GOTO:
3652       t = gimple_goto_dest (stmt);
3653 
3654       /* We will not inline a function which uses computed goto.  The
3655 	 addresses of its local labels, which may be tucked into
3656 	 global storage, are of course not constant across
3657 	 instantiations, which causes unexpected behavior.  */
3658       if (TREE_CODE (t) != LABEL_DECL)
3659 	{
3660 	  inline_forbidden_reason
3661 	    = G_("function %q+F can never be inlined "
3662 		 "because it contains a computed goto");
3663 	  *handled_ops_p = true;
3664 	  return t;
3665 	}
3666       break;
3667 
3668     default:
3669       break;
3670     }
3671 
3672   *handled_ops_p = false;
3673   return NULL_TREE;
3674 }
3675 
3676 /* Return true if FNDECL is a function that cannot be inlined into
3677    another one.  */
3678 
3679 static bool
3680 inline_forbidden_p (tree fndecl)
3681 {
3682   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3683   struct walk_stmt_info wi;
3684   basic_block bb;
3685   bool forbidden_p = false;
3686 
3687   /* First check for shared reasons not to copy the code.  */
3688   inline_forbidden_reason = copy_forbidden (fun);
3689   if (inline_forbidden_reason != NULL)
3690     return true;
3691 
3692   /* Next, walk the statements of the function looking for
3693      constraucts we can't handle, or are non-optimal for inlining.  */
3694   hash_set<tree> visited_nodes;
3695   memset (&wi, 0, sizeof (wi));
3696   wi.info = (void *) fndecl;
3697   wi.pset = &visited_nodes;
3698 
3699   FOR_EACH_BB_FN (bb, fun)
3700     {
3701       gimple *ret;
3702       gimple_seq seq = bb_seq (bb);
3703       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3704       forbidden_p = (ret != NULL);
3705       if (forbidden_p)
3706 	break;
3707     }
3708 
3709   return forbidden_p;
3710 }
3711 
3712 /* Return false if the function FNDECL cannot be inlined on account of its
3713    attributes, true otherwise.  */
3714 static bool
3715 function_attribute_inlinable_p (const_tree fndecl)
3716 {
3717   if (targetm.attribute_table)
3718     {
3719       const_tree a;
3720 
3721       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3722 	{
3723 	  const_tree name = TREE_PURPOSE (a);
3724 	  int i;
3725 
3726 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3727 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3728 	      return targetm.function_attribute_inlinable_p (fndecl);
3729 	}
3730     }
3731 
3732   return true;
3733 }
3734 
3735 /* Returns nonzero if FN is a function that does not have any
3736    fundamental inline blocking properties.  */
3737 
3738 bool
3739 tree_inlinable_function_p (tree fn)
3740 {
3741   bool inlinable = true;
3742   bool do_warning;
3743   tree always_inline;
3744 
3745   /* If we've already decided this function shouldn't be inlined,
3746      there's no need to check again.  */
3747   if (DECL_UNINLINABLE (fn))
3748     return false;
3749 
3750   /* We only warn for functions declared `inline' by the user.  */
3751   do_warning = (warn_inline
3752 		&& DECL_DECLARED_INLINE_P (fn)
3753 		&& !DECL_NO_INLINE_WARNING_P (fn)
3754 		&& !DECL_IN_SYSTEM_HEADER (fn));
3755 
3756   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3757 
3758   if (flag_no_inline
3759       && always_inline == NULL)
3760     {
3761       if (do_warning)
3762         warning (OPT_Winline, "function %q+F can never be inlined because it "
3763                  "is suppressed using -fno-inline", fn);
3764       inlinable = false;
3765     }
3766 
3767   else if (!function_attribute_inlinable_p (fn))
3768     {
3769       if (do_warning)
3770         warning (OPT_Winline, "function %q+F can never be inlined because it "
3771                  "uses attributes conflicting with inlining", fn);
3772       inlinable = false;
3773     }
3774 
3775   else if (inline_forbidden_p (fn))
3776     {
3777       /* See if we should warn about uninlinable functions.  Previously,
3778 	 some of these warnings would be issued while trying to expand
3779 	 the function inline, but that would cause multiple warnings
3780 	 about functions that would for example call alloca.  But since
3781 	 this a property of the function, just one warning is enough.
3782 	 As a bonus we can now give more details about the reason why a
3783 	 function is not inlinable.  */
3784       if (always_inline)
3785 	error (inline_forbidden_reason, fn);
3786       else if (do_warning)
3787 	warning (OPT_Winline, inline_forbidden_reason, fn);
3788 
3789       inlinable = false;
3790     }
3791 
3792   /* Squirrel away the result so that we don't have to check again.  */
3793   DECL_UNINLINABLE (fn) = !inlinable;
3794 
3795   return inlinable;
3796 }
3797 
3798 /* Estimate the cost of a memory move of type TYPE.  Use machine dependent
3799    word size and take possible memcpy call into account and return
3800    cost based on whether optimizing for size or speed according to SPEED_P.  */
3801 
3802 int
3803 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3804 {
3805   HOST_WIDE_INT size;
3806 
3807   gcc_assert (!VOID_TYPE_P (type));
3808 
3809   if (TREE_CODE (type) == VECTOR_TYPE)
3810     {
3811       scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3812       machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3813       int orig_mode_size
3814 	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3815       int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3816       return ((orig_mode_size + simd_mode_size - 1)
3817 	      / simd_mode_size);
3818     }
3819 
3820   size = int_size_in_bytes (type);
3821 
3822   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3823     /* Cost of a memcpy call, 3 arguments and the call.  */
3824     return 4;
3825   else
3826     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3827 }
3828 
3829 /* Returns cost of operation CODE, according to WEIGHTS  */
3830 
3831 static int
3832 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3833 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3834 {
3835   switch (code)
3836     {
3837     /* These are "free" conversions, or their presumed cost
3838        is folded into other operations.  */
3839     case RANGE_EXPR:
3840     CASE_CONVERT:
3841     case COMPLEX_EXPR:
3842     case PAREN_EXPR:
3843     case VIEW_CONVERT_EXPR:
3844       return 0;
3845 
3846     /* Assign cost of 1 to usual operations.
3847        ??? We may consider mapping RTL costs to this.  */
3848     case COND_EXPR:
3849     case VEC_COND_EXPR:
3850     case VEC_PERM_EXPR:
3851 
3852     case PLUS_EXPR:
3853     case POINTER_PLUS_EXPR:
3854     case POINTER_DIFF_EXPR:
3855     case MINUS_EXPR:
3856     case MULT_EXPR:
3857     case MULT_HIGHPART_EXPR:
3858     case FMA_EXPR:
3859 
3860     case ADDR_SPACE_CONVERT_EXPR:
3861     case FIXED_CONVERT_EXPR:
3862     case FIX_TRUNC_EXPR:
3863 
3864     case NEGATE_EXPR:
3865     case FLOAT_EXPR:
3866     case MIN_EXPR:
3867     case MAX_EXPR:
3868     case ABS_EXPR:
3869 
3870     case LSHIFT_EXPR:
3871     case RSHIFT_EXPR:
3872     case LROTATE_EXPR:
3873     case RROTATE_EXPR:
3874 
3875     case BIT_IOR_EXPR:
3876     case BIT_XOR_EXPR:
3877     case BIT_AND_EXPR:
3878     case BIT_NOT_EXPR:
3879 
3880     case TRUTH_ANDIF_EXPR:
3881     case TRUTH_ORIF_EXPR:
3882     case TRUTH_AND_EXPR:
3883     case TRUTH_OR_EXPR:
3884     case TRUTH_XOR_EXPR:
3885     case TRUTH_NOT_EXPR:
3886 
3887     case LT_EXPR:
3888     case LE_EXPR:
3889     case GT_EXPR:
3890     case GE_EXPR:
3891     case EQ_EXPR:
3892     case NE_EXPR:
3893     case ORDERED_EXPR:
3894     case UNORDERED_EXPR:
3895 
3896     case UNLT_EXPR:
3897     case UNLE_EXPR:
3898     case UNGT_EXPR:
3899     case UNGE_EXPR:
3900     case UNEQ_EXPR:
3901     case LTGT_EXPR:
3902 
3903     case CONJ_EXPR:
3904 
3905     case PREDECREMENT_EXPR:
3906     case PREINCREMENT_EXPR:
3907     case POSTDECREMENT_EXPR:
3908     case POSTINCREMENT_EXPR:
3909 
3910     case REALIGN_LOAD_EXPR:
3911 
3912     case WIDEN_SUM_EXPR:
3913     case WIDEN_MULT_EXPR:
3914     case DOT_PROD_EXPR:
3915     case SAD_EXPR:
3916     case WIDEN_MULT_PLUS_EXPR:
3917     case WIDEN_MULT_MINUS_EXPR:
3918     case WIDEN_LSHIFT_EXPR:
3919 
3920     case VEC_WIDEN_MULT_HI_EXPR:
3921     case VEC_WIDEN_MULT_LO_EXPR:
3922     case VEC_WIDEN_MULT_EVEN_EXPR:
3923     case VEC_WIDEN_MULT_ODD_EXPR:
3924     case VEC_UNPACK_HI_EXPR:
3925     case VEC_UNPACK_LO_EXPR:
3926     case VEC_UNPACK_FLOAT_HI_EXPR:
3927     case VEC_UNPACK_FLOAT_LO_EXPR:
3928     case VEC_PACK_TRUNC_EXPR:
3929     case VEC_PACK_SAT_EXPR:
3930     case VEC_PACK_FIX_TRUNC_EXPR:
3931     case VEC_WIDEN_LSHIFT_HI_EXPR:
3932     case VEC_WIDEN_LSHIFT_LO_EXPR:
3933     case VEC_DUPLICATE_EXPR:
3934     case VEC_SERIES_EXPR:
3935 
3936       return 1;
3937 
3938     /* Few special cases of expensive operations.  This is useful
3939        to avoid inlining on functions having too many of these.  */
3940     case TRUNC_DIV_EXPR:
3941     case CEIL_DIV_EXPR:
3942     case FLOOR_DIV_EXPR:
3943     case ROUND_DIV_EXPR:
3944     case EXACT_DIV_EXPR:
3945     case TRUNC_MOD_EXPR:
3946     case CEIL_MOD_EXPR:
3947     case FLOOR_MOD_EXPR:
3948     case ROUND_MOD_EXPR:
3949     case RDIV_EXPR:
3950       if (TREE_CODE (op2) != INTEGER_CST)
3951         return weights->div_mod_cost;
3952       return 1;
3953 
3954     /* Bit-field insertion needs several shift and mask operations.  */
3955     case BIT_INSERT_EXPR:
3956       return 3;
3957 
3958     default:
3959       /* We expect a copy assignment with no operator.  */
3960       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3961       return 0;
3962     }
3963 }
3964 
3965 
3966 /* Estimate number of instructions that will be created by expanding
3967    the statements in the statement sequence STMTS.
3968    WEIGHTS contains weights attributed to various constructs.  */
3969 
3970 int
3971 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3972 {
3973   int cost;
3974   gimple_stmt_iterator gsi;
3975 
3976   cost = 0;
3977   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3978     cost += estimate_num_insns (gsi_stmt (gsi), weights);
3979 
3980   return cost;
3981 }
3982 
3983 
3984 /* Estimate number of instructions that will be created by expanding STMT.
3985    WEIGHTS contains weights attributed to various constructs.  */
3986 
3987 int
3988 estimate_num_insns (gimple *stmt, eni_weights *weights)
3989 {
3990   unsigned cost, i;
3991   enum gimple_code code = gimple_code (stmt);
3992   tree lhs;
3993   tree rhs;
3994 
3995   switch (code)
3996     {
3997     case GIMPLE_ASSIGN:
3998       /* Try to estimate the cost of assignments.  We have three cases to
3999 	 deal with:
4000 	 1) Simple assignments to registers;
4001 	 2) Stores to things that must live in memory.  This includes
4002 	    "normal" stores to scalars, but also assignments of large
4003 	    structures, or constructors of big arrays;
4004 
4005 	 Let us look at the first two cases, assuming we have "a = b + C":
4006 	 <GIMPLE_ASSIGN <var_decl "a">
4007 	        <plus_expr <var_decl "b"> <constant C>>
4008 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4009 	 any target, because "a" usually ends up in a real register.  Hence
4010 	 the only cost of this expression comes from the PLUS_EXPR, and we
4011 	 can ignore the GIMPLE_ASSIGN.
4012 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4013 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4014 	 of moving something into "a", which we compute using the function
4015 	 estimate_move_cost.  */
4016       if (gimple_clobber_p (stmt))
4017 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4018 
4019       lhs = gimple_assign_lhs (stmt);
4020       rhs = gimple_assign_rhs1 (stmt);
4021 
4022       cost = 0;
4023 
4024       /* Account for the cost of moving to / from memory.  */
4025       if (gimple_store_p (stmt))
4026 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4027       if (gimple_assign_load_p (stmt))
4028 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4029 
4030       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4031       				      gimple_assign_rhs1 (stmt),
4032 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4033 				      == GIMPLE_BINARY_RHS
4034 				      ? gimple_assign_rhs2 (stmt) : NULL);
4035       break;
4036 
4037     case GIMPLE_COND:
4038       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4039       				         gimple_op (stmt, 0),
4040 				         gimple_op (stmt, 1));
4041       break;
4042 
4043     case GIMPLE_SWITCH:
4044       {
4045 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4046 	/* Take into account cost of the switch + guess 2 conditional jumps for
4047 	   each case label.
4048 
4049 	   TODO: once the switch expansion logic is sufficiently separated, we can
4050 	   do better job on estimating cost of the switch.  */
4051 	if (weights->time_based)
4052 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4053 	else
4054 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4055       }
4056       break;
4057 
4058     case GIMPLE_CALL:
4059       {
4060 	tree decl;
4061 
4062 	if (gimple_call_internal_p (stmt))
4063 	  return 0;
4064 	else if ((decl = gimple_call_fndecl (stmt))
4065 		 && DECL_BUILT_IN (decl))
4066 	  {
4067 	    /* Do not special case builtins where we see the body.
4068 	       This just confuse inliner.  */
4069 	    struct cgraph_node *node;
4070 	    if (!(node = cgraph_node::get (decl))
4071 		|| node->definition)
4072 	      ;
4073 	    /* For buitins that are likely expanded to nothing or
4074 	       inlined do not account operand costs.  */
4075 	    else if (is_simple_builtin (decl))
4076 	      return 0;
4077 	    else if (is_inexpensive_builtin (decl))
4078 	      return weights->target_builtin_call_cost;
4079 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4080 	      {
4081 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4082 		   specialize the cheap expansion we do here.
4083 		   ???  This asks for a more general solution.  */
4084 		switch (DECL_FUNCTION_CODE (decl))
4085 		  {
4086 		    case BUILT_IN_POW:
4087 		    case BUILT_IN_POWF:
4088 		    case BUILT_IN_POWL:
4089 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4090 			  && (real_equal
4091 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4092 			       &dconst2)))
4093 			return estimate_operator_cost
4094 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4095 			     gimple_call_arg (stmt, 0));
4096 		      break;
4097 
4098 		    default:
4099 		      break;
4100 		  }
4101 	      }
4102 	  }
4103 
4104 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4105 	if (gimple_call_lhs (stmt))
4106 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4107 				      weights->time_based);
4108 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4109 	  {
4110 	    tree arg = gimple_call_arg (stmt, i);
4111 	    cost += estimate_move_cost (TREE_TYPE (arg),
4112 					weights->time_based);
4113 	  }
4114 	break;
4115       }
4116 
4117     case GIMPLE_RETURN:
4118       return weights->return_cost;
4119 
4120     case GIMPLE_GOTO:
4121     case GIMPLE_LABEL:
4122     case GIMPLE_NOP:
4123     case GIMPLE_PHI:
4124     case GIMPLE_PREDICT:
4125     case GIMPLE_DEBUG:
4126       return 0;
4127 
4128     case GIMPLE_ASM:
4129       {
4130 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4131 	/* 1000 means infinity. This avoids overflows later
4132 	   with very long asm statements.  */
4133 	if (count > 1000)
4134 	  count = 1000;
4135 	return MAX (1, count);
4136       }
4137 
4138     case GIMPLE_RESX:
4139       /* This is either going to be an external function call with one
4140 	 argument, or two register copy statements plus a goto.  */
4141       return 2;
4142 
4143     case GIMPLE_EH_DISPATCH:
4144       /* ??? This is going to turn into a switch statement.  Ideally
4145 	 we'd have a look at the eh region and estimate the number of
4146 	 edges involved.  */
4147       return 10;
4148 
4149     case GIMPLE_BIND:
4150       return estimate_num_insns_seq (
4151 	       gimple_bind_body (as_a <gbind *> (stmt)),
4152 	       weights);
4153 
4154     case GIMPLE_EH_FILTER:
4155       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4156 
4157     case GIMPLE_CATCH:
4158       return estimate_num_insns_seq (gimple_catch_handler (
4159 				       as_a <gcatch *> (stmt)),
4160 				     weights);
4161 
4162     case GIMPLE_TRY:
4163       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4164               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4165 
4166     /* OMP directives are generally very expensive.  */
4167 
4168     case GIMPLE_OMP_RETURN:
4169     case GIMPLE_OMP_SECTIONS_SWITCH:
4170     case GIMPLE_OMP_ATOMIC_STORE:
4171     case GIMPLE_OMP_CONTINUE:
4172       /* ...except these, which are cheap.  */
4173       return 0;
4174 
4175     case GIMPLE_OMP_ATOMIC_LOAD:
4176       return weights->omp_cost;
4177 
4178     case GIMPLE_OMP_FOR:
4179       return (weights->omp_cost
4180               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4181               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4182 
4183     case GIMPLE_OMP_PARALLEL:
4184     case GIMPLE_OMP_TASK:
4185     case GIMPLE_OMP_CRITICAL:
4186     case GIMPLE_OMP_MASTER:
4187     case GIMPLE_OMP_TASKGROUP:
4188     case GIMPLE_OMP_ORDERED:
4189     case GIMPLE_OMP_SECTION:
4190     case GIMPLE_OMP_SECTIONS:
4191     case GIMPLE_OMP_SINGLE:
4192     case GIMPLE_OMP_TARGET:
4193     case GIMPLE_OMP_TEAMS:
4194       return (weights->omp_cost
4195               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4196 
4197     case GIMPLE_TRANSACTION:
4198       return (weights->tm_cost
4199 	      + estimate_num_insns_seq (gimple_transaction_body (
4200 					  as_a <gtransaction *> (stmt)),
4201 					weights));
4202 
4203     default:
4204       gcc_unreachable ();
4205     }
4206 
4207   return cost;
4208 }
4209 
4210 /* Estimate number of instructions that will be created by expanding
4211    function FNDECL.  WEIGHTS contains weights attributed to various
4212    constructs.  */
4213 
4214 int
4215 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4216 {
4217   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4218   gimple_stmt_iterator bsi;
4219   basic_block bb;
4220   int n = 0;
4221 
4222   gcc_assert (my_function && my_function->cfg);
4223   FOR_EACH_BB_FN (bb, my_function)
4224     {
4225       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4226 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4227     }
4228 
4229   return n;
4230 }
4231 
4232 
4233 /* Initializes weights used by estimate_num_insns.  */
4234 
4235 void
4236 init_inline_once (void)
4237 {
4238   eni_size_weights.call_cost = 1;
4239   eni_size_weights.indirect_call_cost = 3;
4240   eni_size_weights.target_builtin_call_cost = 1;
4241   eni_size_weights.div_mod_cost = 1;
4242   eni_size_weights.omp_cost = 40;
4243   eni_size_weights.tm_cost = 10;
4244   eni_size_weights.time_based = false;
4245   eni_size_weights.return_cost = 1;
4246 
4247   /* Estimating time for call is difficult, since we have no idea what the
4248      called function does.  In the current uses of eni_time_weights,
4249      underestimating the cost does less harm than overestimating it, so
4250      we choose a rather small value here.  */
4251   eni_time_weights.call_cost = 10;
4252   eni_time_weights.indirect_call_cost = 15;
4253   eni_time_weights.target_builtin_call_cost = 1;
4254   eni_time_weights.div_mod_cost = 10;
4255   eni_time_weights.omp_cost = 40;
4256   eni_time_weights.tm_cost = 40;
4257   eni_time_weights.time_based = true;
4258   eni_time_weights.return_cost = 2;
4259 }
4260 
4261 
4262 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4263 
4264 static void
4265 prepend_lexical_block (tree current_block, tree new_block)
4266 {
4267   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4268   BLOCK_SUBBLOCKS (current_block) = new_block;
4269   BLOCK_SUPERCONTEXT (new_block) = current_block;
4270 }
4271 
4272 /* Add local variables from CALLEE to CALLER.  */
4273 
4274 static inline void
4275 add_local_variables (struct function *callee, struct function *caller,
4276 		     copy_body_data *id)
4277 {
4278   tree var;
4279   unsigned ix;
4280 
4281   FOR_EACH_LOCAL_DECL (callee, ix, var)
4282     if (!can_be_nonlocal (var, id))
4283       {
4284         tree new_var = remap_decl (var, id);
4285 
4286         /* Remap debug-expressions.  */
4287 	if (VAR_P (new_var)
4288 	    && DECL_HAS_DEBUG_EXPR_P (var)
4289 	    && new_var != var)
4290 	  {
4291 	    tree tem = DECL_DEBUG_EXPR (var);
4292 	    bool old_regimplify = id->regimplify;
4293 	    id->remapping_type_depth++;
4294 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4295 	    id->remapping_type_depth--;
4296 	    id->regimplify = old_regimplify;
4297 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4298 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4299 	  }
4300 	add_local_decl (caller, new_var);
4301       }
4302 }
4303 
4304 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4305    have brought in or introduced any debug stmts for SRCVAR.  */
4306 
4307 static inline void
4308 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4309 {
4310   tree *remappedvarp = id->decl_map->get (srcvar);
4311 
4312   if (!remappedvarp)
4313     return;
4314 
4315   if (!VAR_P (*remappedvarp))
4316     return;
4317 
4318   if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4319     return;
4320 
4321   tree tvar = target_for_debug_bind (*remappedvarp);
4322   if (!tvar)
4323     return;
4324 
4325   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4326 					  id->call_stmt);
4327   gimple_seq_add_stmt (bindings, stmt);
4328 }
4329 
4330 /* For each inlined variable for which we may have debug bind stmts,
4331    add before GSI a final debug stmt resetting it, marking the end of
4332    its life, so that var-tracking knows it doesn't have to compute
4333    further locations for it.  */
4334 
4335 static inline void
4336 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4337 {
4338   tree var;
4339   unsigned ix;
4340   gimple_seq bindings = NULL;
4341 
4342   if (!gimple_in_ssa_p (id->src_cfun))
4343     return;
4344 
4345   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4346     return;
4347 
4348   for (var = DECL_ARGUMENTS (id->src_fn);
4349        var; var = DECL_CHAIN (var))
4350     reset_debug_binding (id, var, &bindings);
4351 
4352   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4353     reset_debug_binding (id, var, &bindings);
4354 
4355   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4356 }
4357 
4358 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4359 
4360 static bool
4361 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4362 {
4363   tree use_retvar;
4364   tree fn;
4365   hash_map<tree, tree> *dst;
4366   hash_map<tree, tree> *st = NULL;
4367   tree return_slot;
4368   tree modify_dest;
4369   tree return_bounds = NULL;
4370   struct cgraph_edge *cg_edge;
4371   cgraph_inline_failed_t reason;
4372   basic_block return_block;
4373   edge e;
4374   gimple_stmt_iterator gsi, stmt_gsi;
4375   bool successfully_inlined = false;
4376   bool purge_dead_abnormal_edges;
4377   gcall *call_stmt;
4378   unsigned int i;
4379   unsigned int prop_mask, src_properties;
4380   struct function *dst_cfun;
4381   tree simduid;
4382   use_operand_p use;
4383   gimple *simtenter_stmt = NULL;
4384   vec<tree> *simtvars_save;
4385 
4386   /* The gimplifier uses input_location in too many places, such as
4387      internal_get_tmp_var ().  */
4388   location_t saved_location = input_location;
4389   input_location = gimple_location (stmt);
4390 
4391   /* From here on, we're only interested in CALL_EXPRs.  */
4392   call_stmt = dyn_cast <gcall *> (stmt);
4393   if (!call_stmt)
4394     goto egress;
4395 
4396   cg_edge = id->dst_node->get_edge (stmt);
4397   gcc_checking_assert (cg_edge);
4398   /* First, see if we can figure out what function is being called.
4399      If we cannot, then there is no hope of inlining the function.  */
4400   if (cg_edge->indirect_unknown_callee)
4401     goto egress;
4402   fn = cg_edge->callee->decl;
4403   gcc_checking_assert (fn);
4404 
4405   /* If FN is a declaration of a function in a nested scope that was
4406      globally declared inline, we don't set its DECL_INITIAL.
4407      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4408      C++ front-end uses it for cdtors to refer to their internal
4409      declarations, that are not real functions.  Fortunately those
4410      don't have trees to be saved, so we can tell by checking their
4411      gimple_body.  */
4412   if (!DECL_INITIAL (fn)
4413       && DECL_ABSTRACT_ORIGIN (fn)
4414       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4415     fn = DECL_ABSTRACT_ORIGIN (fn);
4416 
4417   /* Don't try to inline functions that are not well-suited to inlining.  */
4418   if (cg_edge->inline_failed)
4419     {
4420       reason = cg_edge->inline_failed;
4421       /* If this call was originally indirect, we do not want to emit any
4422 	 inlining related warnings or sorry messages because there are no
4423 	 guarantees regarding those.  */
4424       if (cg_edge->indirect_inlining_edge)
4425 	goto egress;
4426 
4427       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4428           /* For extern inline functions that get redefined we always
4429 	     silently ignored always_inline flag. Better behavior would
4430 	     be to be able to keep both bodies and use extern inline body
4431 	     for inlining, but we can't do that because frontends overwrite
4432 	     the body.  */
4433 	  && !cg_edge->callee->local.redefined_extern_inline
4434 	  /* During early inline pass, report only when optimization is
4435 	     not turned on.  */
4436 	  && (symtab->global_info_ready
4437 	      || !optimize
4438 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4439 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4440 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4441 	{
4442 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4443 		 cgraph_inline_failed_string (reason));
4444 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4445 	    inform (gimple_location (stmt), "called from here");
4446 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4447 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4448                    "called from this function");
4449 	}
4450       else if (warn_inline
4451 	       && DECL_DECLARED_INLINE_P (fn)
4452 	       && !DECL_NO_INLINE_WARNING_P (fn)
4453 	       && !DECL_IN_SYSTEM_HEADER (fn)
4454 	       && reason != CIF_UNSPECIFIED
4455 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4456 	       /* Do not warn about not inlined recursive calls.  */
4457 	       && !cg_edge->recursive_p ()
4458 	       /* Avoid warnings during early inline pass. */
4459 	       && symtab->global_info_ready)
4460 	{
4461 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4462 		       fn, _(cgraph_inline_failed_string (reason))))
4463 	    {
4464 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4465 		inform (gimple_location (stmt), "called from here");
4466 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4467 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4468                        "called from this function");
4469 	    }
4470 	}
4471       goto egress;
4472     }
4473   id->src_node = cg_edge->callee;
4474 
4475   /* If callee is thunk, all we need is to adjust the THIS pointer
4476      and redirect to function being thunked.  */
4477   if (id->src_node->thunk.thunk_p)
4478     {
4479       cgraph_edge *edge;
4480       tree virtual_offset = NULL;
4481       profile_count count = cg_edge->count;
4482       tree op;
4483       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4484 
4485       cg_edge->remove ();
4486       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4487 		   		           gimple_uid (stmt),
4488 				   	   profile_count::one (),
4489 					   profile_count::one (),
4490 				           true);
4491       edge->count = count;
4492       if (id->src_node->thunk.virtual_offset_p)
4493         virtual_offset = size_int (id->src_node->thunk.virtual_value);
4494       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4495 			      NULL);
4496       gsi_insert_before (&iter, gimple_build_assign (op,
4497 						    gimple_call_arg (stmt, 0)),
4498 			 GSI_NEW_STMT);
4499       gcc_assert (id->src_node->thunk.this_adjusting);
4500       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4501 			 virtual_offset);
4502 
4503       gimple_call_set_arg (stmt, 0, op);
4504       gimple_call_set_fndecl (stmt, edge->callee->decl);
4505       update_stmt (stmt);
4506       id->src_node->remove ();
4507       expand_call_inline (bb, stmt, id);
4508       maybe_remove_unused_call_args (cfun, stmt);
4509       return true;
4510     }
4511   fn = cg_edge->callee->decl;
4512   cg_edge->callee->get_untransformed_body ();
4513 
4514   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4515     cg_edge->callee->verify ();
4516 
4517   /* We will be inlining this callee.  */
4518   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4519   id->assign_stmts.create (0);
4520 
4521   /* Update the callers EH personality.  */
4522   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4523     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4524       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4525 
4526   /* Split the block before the GIMPLE_CALL.  */
4527   stmt_gsi = gsi_for_stmt (stmt);
4528   gsi_prev (&stmt_gsi);
4529   e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4530   bb = e->src;
4531   return_block = e->dest;
4532   remove_edge (e);
4533 
4534   /* If the GIMPLE_CALL was in the last statement of BB, it may have
4535      been the source of abnormal edges.  In this case, schedule
4536      the removal of dead abnormal edges.  */
4537   gsi = gsi_start_bb (return_block);
4538   gsi_next (&gsi);
4539   purge_dead_abnormal_edges = gsi_end_p (gsi);
4540 
4541   stmt_gsi = gsi_start_bb (return_block);
4542 
4543   /* Build a block containing code to initialize the arguments, the
4544      actual inline expansion of the body, and a label for the return
4545      statements within the function to jump to.  The type of the
4546      statement expression is the return type of the function call.
4547      ???  If the call does not have an associated block then we will
4548      remap all callee blocks to NULL, effectively dropping most of
4549      its debug information.  This should only happen for calls to
4550      artificial decls inserted by the compiler itself.  We need to
4551      either link the inlined blocks into the caller block tree or
4552      not refer to them in any way to not break GC for locations.  */
4553   if (gimple_block (stmt))
4554     {
4555       id->block = make_node (BLOCK);
4556       BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4557       BLOCK_SOURCE_LOCATION (id->block)
4558 	= LOCATION_LOCUS (gimple_location (stmt));
4559       prepend_lexical_block (gimple_block (stmt), id->block);
4560     }
4561 
4562   /* Local declarations will be replaced by their equivalents in this
4563      map.  */
4564   st = id->decl_map;
4565   id->decl_map = new hash_map<tree, tree>;
4566   dst = id->debug_map;
4567   id->debug_map = NULL;
4568 
4569   /* Record the function we are about to inline.  */
4570   id->src_fn = fn;
4571   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4572   id->call_stmt = call_stmt;
4573 
4574   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4575      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4576   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4577   simtvars_save = id->dst_simt_vars;
4578   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4579       && (simduid = bb->loop_father->simduid) != NULL_TREE
4580       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4581       && single_imm_use (simduid, &use, &simtenter_stmt)
4582       && is_gimple_call (simtenter_stmt)
4583       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4584     vec_alloc (id->dst_simt_vars, 0);
4585   else
4586     id->dst_simt_vars = NULL;
4587 
4588   if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4589     profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4590 
4591   /* If the src function contains an IFN_VA_ARG, then so will the dst
4592      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
4593   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4594   src_properties = id->src_cfun->curr_properties & prop_mask;
4595   if (src_properties != prop_mask)
4596     dst_cfun->curr_properties &= src_properties | ~prop_mask;
4597 
4598   gcc_assert (!id->src_cfun->after_inlining);
4599 
4600   id->entry_bb = bb;
4601   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4602     {
4603       gimple_stmt_iterator si = gsi_last_bb (bb);
4604       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4605       						   NOT_TAKEN),
4606 			GSI_NEW_STMT);
4607     }
4608   initialize_inlined_parameters (id, stmt, fn, bb);
4609   if (debug_nonbind_markers_p && debug_inline_points && id->block
4610       && inlined_function_outer_scope_p (id->block))
4611     {
4612       gimple_stmt_iterator si = gsi_last_bb (bb);
4613       gsi_insert_after (&si, gimple_build_debug_inline_entry
4614 			(id->block, input_location), GSI_NEW_STMT);
4615     }
4616 
4617   if (DECL_INITIAL (fn))
4618     {
4619       if (gimple_block (stmt))
4620 	{
4621 	  tree *var;
4622 
4623 	  prepend_lexical_block (id->block,
4624 				 remap_blocks (DECL_INITIAL (fn), id));
4625 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4626 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4627 				   == NULL_TREE));
4628 	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4629 	     otherwise for DWARF DW_TAG_formal_parameter will not be children of
4630 	     DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4631 	     under it.  The parameters can be then evaluated in the debugger,
4632 	     but don't show in backtraces.  */
4633 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4634 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4635 	      {
4636 		tree v = *var;
4637 		*var = TREE_CHAIN (v);
4638 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4639 		BLOCK_VARS (id->block) = v;
4640 	      }
4641 	    else
4642 	      var = &TREE_CHAIN (*var);
4643 	}
4644       else
4645 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4646     }
4647 
4648   /* Return statements in the function body will be replaced by jumps
4649      to the RET_LABEL.  */
4650   gcc_assert (DECL_INITIAL (fn));
4651   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4652 
4653   /* Find the LHS to which the result of this call is assigned.  */
4654   return_slot = NULL;
4655   if (gimple_call_lhs (stmt))
4656     {
4657       modify_dest = gimple_call_lhs (stmt);
4658 
4659       /* Remember where to copy returned bounds.  */
4660       if (gimple_call_with_bounds_p (stmt)
4661 	  && TREE_CODE (modify_dest) == SSA_NAME)
4662 	{
4663 	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4664 	  if (retbnd)
4665 	    {
4666 	      return_bounds = gimple_call_lhs (retbnd);
4667 	      /* If returned bounds are not used then just
4668 		 remove unused call.  */
4669 	      if (!return_bounds)
4670 		{
4671 		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4672 		  gsi_remove (&iter, true);
4673 		}
4674 	    }
4675 	}
4676 
4677       /* The function which we are inlining might not return a value,
4678 	 in which case we should issue a warning that the function
4679 	 does not return a value.  In that case the optimizers will
4680 	 see that the variable to which the value is assigned was not
4681 	 initialized.  We do not want to issue a warning about that
4682 	 uninitialized variable.  */
4683       if (DECL_P (modify_dest))
4684 	TREE_NO_WARNING (modify_dest) = 1;
4685 
4686       if (gimple_call_return_slot_opt_p (call_stmt))
4687 	{
4688 	  return_slot = modify_dest;
4689 	  modify_dest = NULL;
4690 	}
4691     }
4692   else
4693     modify_dest = NULL;
4694 
4695   /* If we are inlining a call to the C++ operator new, we don't want
4696      to use type based alias analysis on the return value.  Otherwise
4697      we may get confused if the compiler sees that the inlined new
4698      function returns a pointer which was just deleted.  See bug
4699      33407.  */
4700   if (DECL_IS_OPERATOR_NEW (fn))
4701     {
4702       return_slot = NULL;
4703       modify_dest = NULL;
4704     }
4705 
4706   /* Declare the return variable for the function.  */
4707   use_retvar = declare_return_variable (id, return_slot, modify_dest,
4708 					return_bounds, bb);
4709 
4710   /* Add local vars in this inlined callee to caller.  */
4711   add_local_variables (id->src_cfun, cfun, id);
4712 
4713   if (dump_file && (dump_flags & TDF_DETAILS))
4714     {
4715       fprintf (dump_file, "Inlining %s to %s with frequency %4.2f\n",
4716 	       id->src_node->dump_name (),
4717 	       id->dst_node->dump_name (),
4718 	       cg_edge->sreal_frequency ().to_double ());
4719       id->src_node->dump (dump_file);
4720       id->dst_node->dump (dump_file);
4721     }
4722 
4723   /* This is it.  Duplicate the callee body.  Assume callee is
4724      pre-gimplified.  Note that we must not alter the caller
4725      function in any way before this point, as this CALL_EXPR may be
4726      a self-referential call; if we're calling ourselves, we need to
4727      duplicate our body before altering anything.  */
4728   copy_body (id, bb, return_block, NULL);
4729 
4730   reset_debug_bindings (id, stmt_gsi);
4731 
4732   if (flag_stack_reuse != SR_NONE)
4733     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4734       if (!TREE_THIS_VOLATILE (p))
4735 	{
4736 	  tree *varp = id->decl_map->get (p);
4737 	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4738 	    {
4739 	      tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4740 	      gimple *clobber_stmt;
4741 	      TREE_THIS_VOLATILE (clobber) = 1;
4742 	      clobber_stmt = gimple_build_assign (*varp, clobber);
4743 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
4744 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4745 	    }
4746 	}
4747 
4748   /* Reset the escaped solution.  */
4749   if (cfun->gimple_df)
4750     pt_solution_reset (&cfun->gimple_df->escaped);
4751 
4752   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
4753   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4754     {
4755       size_t nargs = gimple_call_num_args (simtenter_stmt);
4756       vec<tree> *vars = id->dst_simt_vars;
4757       auto_vec<tree> newargs (nargs + vars->length ());
4758       for (size_t i = 0; i < nargs; i++)
4759 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4760       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4761 	{
4762 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4763 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4764 	}
4765       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4766       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4767       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4768       gsi_replace (&gsi, g, false);
4769     }
4770   vec_free (id->dst_simt_vars);
4771   id->dst_simt_vars = simtvars_save;
4772 
4773   /* Clean up.  */
4774   if (id->debug_map)
4775     {
4776       delete id->debug_map;
4777       id->debug_map = dst;
4778     }
4779   delete id->decl_map;
4780   id->decl_map = st;
4781 
4782   /* Unlink the calls virtual operands before replacing it.  */
4783   unlink_stmt_vdef (stmt);
4784   if (gimple_vdef (stmt)
4785       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4786     release_ssa_name (gimple_vdef (stmt));
4787 
4788   /* If the inlined function returns a result that we care about,
4789      substitute the GIMPLE_CALL with an assignment of the return
4790      variable to the LHS of the call.  That is, if STMT was
4791      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4792   if (use_retvar && gimple_call_lhs (stmt))
4793     {
4794       gimple *old_stmt = stmt;
4795       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4796       gsi_replace (&stmt_gsi, stmt, false);
4797       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4798       /* Append a clobber for id->retvar if easily possible.  */
4799       if (flag_stack_reuse != SR_NONE
4800 	  && id->retvar
4801 	  && VAR_P (id->retvar)
4802 	  && id->retvar != return_slot
4803 	  && id->retvar != modify_dest
4804 	  && !TREE_THIS_VOLATILE (id->retvar)
4805 	  && !is_gimple_reg (id->retvar)
4806 	  && !stmt_ends_bb_p (stmt))
4807 	{
4808 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4809 	  gimple *clobber_stmt;
4810 	  TREE_THIS_VOLATILE (clobber) = 1;
4811 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4812 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4813 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4814 	}
4815 
4816       /* Copy bounds if we copy structure with bounds.  */
4817       if (chkp_function_instrumented_p (id->dst_fn)
4818 	  && !BOUNDED_P (use_retvar)
4819 	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4820 	id->assign_stmts.safe_push (stmt);
4821     }
4822   else
4823     {
4824       /* Handle the case of inlining a function with no return
4825 	 statement, which causes the return value to become undefined.  */
4826       if (gimple_call_lhs (stmt)
4827 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4828 	{
4829 	  tree name = gimple_call_lhs (stmt);
4830 	  tree var = SSA_NAME_VAR (name);
4831 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
4832 
4833 	  if (def)
4834 	    {
4835 	      /* If the variable is used undefined, make this name
4836 		 undefined via a move.  */
4837 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4838 	      gsi_replace (&stmt_gsi, stmt, true);
4839 	    }
4840 	  else
4841 	    {
4842 	      if (!var)
4843 		{
4844 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4845 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4846 		}
4847 	      /* Otherwise make this variable undefined.  */
4848 	      gsi_remove (&stmt_gsi, true);
4849 	      set_ssa_default_def (cfun, var, name);
4850 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4851 	    }
4852 	}
4853       /* Replace with a clobber for id->retvar.  */
4854       else if (flag_stack_reuse != SR_NONE
4855 	       && id->retvar
4856 	       && VAR_P (id->retvar)
4857 	       && id->retvar != return_slot
4858 	       && id->retvar != modify_dest
4859 	       && !TREE_THIS_VOLATILE (id->retvar)
4860 	       && !is_gimple_reg (id->retvar))
4861 	{
4862 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4863 	  gimple *clobber_stmt;
4864 	  TREE_THIS_VOLATILE (clobber) = 1;
4865 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4866 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
4867 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
4868 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4869 	}
4870       else
4871 	gsi_remove (&stmt_gsi, true);
4872     }
4873 
4874   /* Put returned bounds into the correct place if required.  */
4875   if (return_bounds)
4876     {
4877       gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4878       gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4879       gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4880       unlink_stmt_vdef (old_stmt);
4881       gsi_replace (&bnd_gsi, new_stmt, false);
4882       maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4883       cgraph_update_edges_for_call_stmt (old_stmt,
4884 					 gimple_call_fndecl (old_stmt),
4885 					 new_stmt);
4886     }
4887 
4888   if (purge_dead_abnormal_edges)
4889     {
4890       gimple_purge_dead_eh_edges (return_block);
4891       gimple_purge_dead_abnormal_call_edges (return_block);
4892     }
4893 
4894   /* If the value of the new expression is ignored, that's OK.  We
4895      don't warn about this for CALL_EXPRs, so we shouldn't warn about
4896      the equivalent inlined version either.  */
4897   if (is_gimple_assign (stmt))
4898     {
4899       gcc_assert (gimple_assign_single_p (stmt)
4900 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4901       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4902     }
4903 
4904   /* Copy bounds for all generated assigns that need it.  */
4905   for (i = 0; i < id->assign_stmts.length (); i++)
4906     chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4907   id->assign_stmts.release ();
4908 
4909   /* Output the inlining info for this abstract function, since it has been
4910      inlined.  If we don't do this now, we can lose the information about the
4911      variables in the function when the blocks get blown away as soon as we
4912      remove the cgraph node.  */
4913   if (gimple_block (stmt))
4914     (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4915 
4916   /* Update callgraph if needed.  */
4917   cg_edge->callee->remove ();
4918 
4919   id->block = NULL_TREE;
4920   id->retvar = NULL_TREE;
4921   id->retbnd = NULL_TREE;
4922   successfully_inlined = true;
4923 
4924  egress:
4925   input_location = saved_location;
4926   return successfully_inlined;
4927 }
4928 
4929 /* Expand call statements reachable from STMT_P.
4930    We can only have CALL_EXPRs as the "toplevel" tree code or nested
4931    in a MODIFY_EXPR.  */
4932 
4933 static bool
4934 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4935 {
4936   gimple_stmt_iterator gsi;
4937   bool inlined = false;
4938 
4939   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4940     {
4941       gimple *stmt = gsi_stmt (gsi);
4942       gsi_prev (&gsi);
4943 
4944       if (is_gimple_call (stmt)
4945 	  && !gimple_call_internal_p (stmt))
4946 	inlined |= expand_call_inline (bb, stmt, id);
4947     }
4948 
4949   return inlined;
4950 }
4951 
4952 
4953 /* Walk all basic blocks created after FIRST and try to fold every statement
4954    in the STATEMENTS pointer set.  */
4955 
4956 static void
4957 fold_marked_statements (int first, hash_set<gimple *> *statements)
4958 {
4959   for (; first < n_basic_blocks_for_fn (cfun); first++)
4960     if (BASIC_BLOCK_FOR_FN (cfun, first))
4961       {
4962         gimple_stmt_iterator gsi;
4963 
4964 	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4965 	     !gsi_end_p (gsi);
4966 	     gsi_next (&gsi))
4967 	  if (statements->contains (gsi_stmt (gsi)))
4968 	    {
4969 	      gimple *old_stmt = gsi_stmt (gsi);
4970 	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4971 
4972 	      if (old_decl && DECL_BUILT_IN (old_decl))
4973 		{
4974 		  /* Folding builtins can create multiple instructions,
4975 		     we need to look at all of them.  */
4976 		  gimple_stmt_iterator i2 = gsi;
4977 		  gsi_prev (&i2);
4978 		  if (fold_stmt (&gsi))
4979 		    {
4980 		      gimple *new_stmt;
4981 		      /* If a builtin at the end of a bb folded into nothing,
4982 			 the following loop won't work.  */
4983 		      if (gsi_end_p (gsi))
4984 			{
4985 			  cgraph_update_edges_for_call_stmt (old_stmt,
4986 							     old_decl, NULL);
4987 			  break;
4988 			}
4989 		      if (gsi_end_p (i2))
4990 			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4991 		      else
4992 			gsi_next (&i2);
4993 		      while (1)
4994 			{
4995 			  new_stmt = gsi_stmt (i2);
4996 			  update_stmt (new_stmt);
4997 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4998 							     new_stmt);
4999 
5000 			  if (new_stmt == gsi_stmt (gsi))
5001 			    {
5002 			      /* It is okay to check only for the very last
5003 				 of these statements.  If it is a throwing
5004 				 statement nothing will change.  If it isn't
5005 				 this can remove EH edges.  If that weren't
5006 				 correct then because some intermediate stmts
5007 				 throw, but not the last one.  That would mean
5008 				 we'd have to split the block, which we can't
5009 				 here and we'd loose anyway.  And as builtins
5010 				 probably never throw, this all
5011 				 is mood anyway.  */
5012 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
5013 								  new_stmt))
5014 				gimple_purge_dead_eh_edges (
5015 				  BASIC_BLOCK_FOR_FN (cfun, first));
5016 			      break;
5017 			    }
5018 			  gsi_next (&i2);
5019 			}
5020 		    }
5021 		}
5022 	      else if (fold_stmt (&gsi))
5023 		{
5024 		  /* Re-read the statement from GSI as fold_stmt() may
5025 		     have changed it.  */
5026 		  gimple *new_stmt = gsi_stmt (gsi);
5027 		  update_stmt (new_stmt);
5028 
5029 		  if (is_gimple_call (old_stmt)
5030 		      || is_gimple_call (new_stmt))
5031 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5032 						       new_stmt);
5033 
5034 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5035 		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5036 								    first));
5037 		}
5038 	    }
5039       }
5040 }
5041 
5042 /* Expand calls to inline functions in the body of FN.  */
5043 
5044 unsigned int
5045 optimize_inline_calls (tree fn)
5046 {
5047   copy_body_data id;
5048   basic_block bb;
5049   int last = n_basic_blocks_for_fn (cfun);
5050   bool inlined_p = false;
5051 
5052   /* Clear out ID.  */
5053   memset (&id, 0, sizeof (id));
5054 
5055   id.src_node = id.dst_node = cgraph_node::get (fn);
5056   gcc_assert (id.dst_node->definition);
5057   id.dst_fn = fn;
5058   /* Or any functions that aren't finished yet.  */
5059   if (current_function_decl)
5060     id.dst_fn = current_function_decl;
5061 
5062   id.copy_decl = copy_decl_maybe_to_var;
5063   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5064   id.transform_new_cfg = false;
5065   id.transform_return_to_modify = true;
5066   id.transform_parameter = true;
5067   id.transform_lang_insert_block = NULL;
5068   id.statements_to_fold = new hash_set<gimple *>;
5069 
5070   push_gimplify_context ();
5071 
5072   /* We make no attempts to keep dominance info up-to-date.  */
5073   free_dominance_info (CDI_DOMINATORS);
5074   free_dominance_info (CDI_POST_DOMINATORS);
5075 
5076   /* Register specific gimple functions.  */
5077   gimple_register_cfg_hooks ();
5078 
5079   /* Reach the trees by walking over the CFG, and note the
5080      enclosing basic-blocks in the call edges.  */
5081   /* We walk the blocks going forward, because inlined function bodies
5082      will split id->current_basic_block, and the new blocks will
5083      follow it; we'll trudge through them, processing their CALL_EXPRs
5084      along the way.  */
5085   FOR_EACH_BB_FN (bb, cfun)
5086     inlined_p |= gimple_expand_calls_inline (bb, &id);
5087 
5088   pop_gimplify_context (NULL);
5089 
5090   if (flag_checking)
5091     {
5092       struct cgraph_edge *e;
5093 
5094       id.dst_node->verify ();
5095 
5096       /* Double check that we inlined everything we are supposed to inline.  */
5097       for (e = id.dst_node->callees; e; e = e->next_callee)
5098 	gcc_assert (e->inline_failed);
5099     }
5100 
5101   /* Fold queued statements.  */
5102   update_max_bb_count ();
5103   fold_marked_statements (last, id.statements_to_fold);
5104   delete id.statements_to_fold;
5105 
5106   gcc_assert (!id.debug_stmts.exists ());
5107 
5108   /* If we didn't inline into the function there is nothing to do.  */
5109   if (!inlined_p)
5110     return 0;
5111 
5112   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5113   number_blocks (fn);
5114 
5115   delete_unreachable_blocks_update_callgraph (&id);
5116   if (flag_checking)
5117     id.dst_node->verify ();
5118 
5119   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5120      not possible yet - the IPA passes might make various functions to not
5121      throw and they don't care to proactively update local EH info.  This is
5122      done later in fixup_cfg pass that also execute the verification.  */
5123   return (TODO_update_ssa
5124 	  | TODO_cleanup_cfg
5125 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5126 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5127 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5128 	     ? TODO_rebuild_frequencies : 0));
5129 }
5130 
5131 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5132 
5133 tree
5134 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5135 {
5136   enum tree_code code = TREE_CODE (*tp);
5137   enum tree_code_class cl = TREE_CODE_CLASS (code);
5138 
5139   /* We make copies of most nodes.  */
5140   if (IS_EXPR_CODE_CLASS (cl)
5141       || code == TREE_LIST
5142       || code == TREE_VEC
5143       || code == TYPE_DECL
5144       || code == OMP_CLAUSE)
5145     {
5146       /* Because the chain gets clobbered when we make a copy, we save it
5147 	 here.  */
5148       tree chain = NULL_TREE, new_tree;
5149 
5150       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5151 	chain = TREE_CHAIN (*tp);
5152 
5153       /* Copy the node.  */
5154       new_tree = copy_node (*tp);
5155 
5156       *tp = new_tree;
5157 
5158       /* Now, restore the chain, if appropriate.  That will cause
5159 	 walk_tree to walk into the chain as well.  */
5160       if (code == PARM_DECL
5161 	  || code == TREE_LIST
5162 	  || code == OMP_CLAUSE)
5163 	TREE_CHAIN (*tp) = chain;
5164 
5165       /* For now, we don't update BLOCKs when we make copies.  So, we
5166 	 have to nullify all BIND_EXPRs.  */
5167       if (TREE_CODE (*tp) == BIND_EXPR)
5168 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5169     }
5170   else if (code == CONSTRUCTOR)
5171     {
5172       /* CONSTRUCTOR nodes need special handling because
5173          we need to duplicate the vector of elements.  */
5174       tree new_tree;
5175 
5176       new_tree = copy_node (*tp);
5177       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5178       *tp = new_tree;
5179     }
5180   else if (code == STATEMENT_LIST)
5181     /* We used to just abort on STATEMENT_LIST, but we can run into them
5182        with statement-expressions (c++/40975).  */
5183     copy_statement_list (tp);
5184   else if (TREE_CODE_CLASS (code) == tcc_type)
5185     *walk_subtrees = 0;
5186   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5187     *walk_subtrees = 0;
5188   else if (TREE_CODE_CLASS (code) == tcc_constant)
5189     *walk_subtrees = 0;
5190   return NULL_TREE;
5191 }
5192 
5193 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5194    information indicating to what new SAVE_EXPR this one should be mapped,
5195    use that one.  Otherwise, create a new node and enter it in ST.  FN is
5196    the function into which the copy will be placed.  */
5197 
5198 static void
5199 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5200 {
5201   tree *n;
5202   tree t;
5203 
5204   /* See if we already encountered this SAVE_EXPR.  */
5205   n = st->get (*tp);
5206 
5207   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5208   if (!n)
5209     {
5210       t = copy_node (*tp);
5211 
5212       /* Remember this SAVE_EXPR.  */
5213       st->put (*tp, t);
5214       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5215       st->put (t, t);
5216     }
5217   else
5218     {
5219       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5220       *walk_subtrees = 0;
5221       t = *n;
5222     }
5223 
5224   /* Replace this SAVE_EXPR with the copy.  */
5225   *tp = t;
5226 }
5227 
5228 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5229    label, copies the declaration and enters it in the splay_tree in DATA (which
5230    is really a 'copy_body_data *'.  */
5231 
5232 static tree
5233 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5234 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5235 		        struct walk_stmt_info *wi)
5236 {
5237   copy_body_data *id = (copy_body_data *) wi->info;
5238   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5239 
5240   if (stmt)
5241     {
5242       tree decl = gimple_label_label (stmt);
5243 
5244       /* Copy the decl and remember the copy.  */
5245       insert_decl_map (id, decl, id->copy_decl (decl, id));
5246     }
5247 
5248   return NULL_TREE;
5249 }
5250 
5251 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5252 						  struct walk_stmt_info *wi);
5253 
5254 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5255    Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5256    remaps all local declarations to appropriate replacements in gimple
5257    operands. */
5258 
5259 static tree
5260 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5261 {
5262   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5263   copy_body_data *id = (copy_body_data *) wi->info;
5264   hash_map<tree, tree> *st = id->decl_map;
5265   tree *n;
5266   tree expr = *tp;
5267 
5268   /* For recursive invocations this is no longer the LHS itself.  */
5269   bool is_lhs = wi->is_lhs;
5270   wi->is_lhs = false;
5271 
5272   if (TREE_CODE (expr) == SSA_NAME)
5273     {
5274       *tp = remap_ssa_name (*tp, id);
5275       *walk_subtrees = 0;
5276       if (is_lhs)
5277 	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5278     }
5279   /* Only a local declaration (variable or label).  */
5280   else if ((VAR_P (expr) && !TREE_STATIC (expr))
5281 	   || TREE_CODE (expr) == LABEL_DECL)
5282     {
5283       /* Lookup the declaration.  */
5284       n = st->get (expr);
5285 
5286       /* If it's there, remap it.  */
5287       if (n)
5288 	*tp = *n;
5289       *walk_subtrees = 0;
5290     }
5291   else if (TREE_CODE (expr) == STATEMENT_LIST
5292 	   || TREE_CODE (expr) == BIND_EXPR
5293 	   || TREE_CODE (expr) == SAVE_EXPR)
5294     gcc_unreachable ();
5295   else if (TREE_CODE (expr) == TARGET_EXPR)
5296     {
5297       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5298          It's OK for this to happen if it was part of a subtree that
5299          isn't immediately expanded, such as operand 2 of another
5300          TARGET_EXPR.  */
5301       if (!TREE_OPERAND (expr, 1))
5302 	{
5303 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5304 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5305 	}
5306     }
5307   else if (TREE_CODE (expr) == OMP_CLAUSE)
5308     {
5309       /* Before the omplower pass completes, some OMP clauses can contain
5310 	 sequences that are neither copied by gimple_seq_copy nor walked by
5311 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5312 	 in those situations, we have to copy and process them explicitely.  */
5313 
5314       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5315 	{
5316 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5317 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5318 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5319 	}
5320       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5321 	{
5322 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5323 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5324 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5325 	}
5326       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5327 	{
5328 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5329 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5330 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5331 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5332 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5333 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5334 	}
5335     }
5336 
5337   /* Keep iterating.  */
5338   return NULL_TREE;
5339 }
5340 
5341 
5342 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5343    Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5344    remaps all local declarations to appropriate replacements in gimple
5345    statements. */
5346 
5347 static tree
5348 replace_locals_stmt (gimple_stmt_iterator *gsip,
5349 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5350 		     struct walk_stmt_info *wi)
5351 {
5352   copy_body_data *id = (copy_body_data *) wi->info;
5353   gimple *gs = gsi_stmt (*gsip);
5354 
5355   if (gbind *stmt = dyn_cast <gbind *> (gs))
5356     {
5357       tree block = gimple_bind_block (stmt);
5358 
5359       if (block)
5360 	{
5361 	  remap_block (&block, id);
5362 	  gimple_bind_set_block (stmt, block);
5363 	}
5364 
5365       /* This will remap a lot of the same decls again, but this should be
5366 	 harmless.  */
5367       if (gimple_bind_vars (stmt))
5368 	{
5369 	  tree old_var, decls = gimple_bind_vars (stmt);
5370 
5371 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5372 	    if (!can_be_nonlocal (old_var, id)
5373 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5374 	      remap_decl (old_var, id);
5375 
5376 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5377 	  id->prevent_decl_creation_for_types = true;
5378 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5379 	  id->prevent_decl_creation_for_types = false;
5380 	}
5381     }
5382 
5383   /* Keep iterating.  */
5384   return NULL_TREE;
5385 }
5386 
5387 /* Create a copy of SEQ and remap all decls in it.  */
5388 
5389 static gimple_seq
5390 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5391 {
5392   if (!seq)
5393     return NULL;
5394 
5395   /* If there are any labels in OMP sequences, they can be only referred to in
5396      the sequence itself and therefore we can do both here.  */
5397   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5398   gimple_seq copy = gimple_seq_copy (seq);
5399   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5400   return copy;
5401 }
5402 
5403 /* Copies everything in SEQ and replaces variables and labels local to
5404    current_function_decl.  */
5405 
5406 gimple_seq
5407 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5408 {
5409   copy_body_data id;
5410   struct walk_stmt_info wi;
5411   gimple_seq copy;
5412 
5413   /* There's nothing to do for NULL_TREE.  */
5414   if (seq == NULL)
5415     return seq;
5416 
5417   /* Set up ID.  */
5418   memset (&id, 0, sizeof (id));
5419   id.src_fn = current_function_decl;
5420   id.dst_fn = current_function_decl;
5421   id.src_cfun = cfun;
5422   id.decl_map = new hash_map<tree, tree>;
5423   id.debug_map = NULL;
5424 
5425   id.copy_decl = copy_decl_no_change;
5426   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5427   id.transform_new_cfg = false;
5428   id.transform_return_to_modify = false;
5429   id.transform_parameter = false;
5430   id.transform_lang_insert_block = NULL;
5431 
5432   /* Walk the tree once to find local labels.  */
5433   memset (&wi, 0, sizeof (wi));
5434   hash_set<tree> visited;
5435   wi.info = &id;
5436   wi.pset = &visited;
5437   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5438 
5439   copy = gimple_seq_copy (seq);
5440 
5441   /* Walk the copy, remapping decls.  */
5442   memset (&wi, 0, sizeof (wi));
5443   wi.info = &id;
5444   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5445 
5446   /* Clean up.  */
5447   delete id.decl_map;
5448   if (id.debug_map)
5449     delete id.debug_map;
5450   if (id.dependence_map)
5451     {
5452       delete id.dependence_map;
5453       id.dependence_map = NULL;
5454     }
5455 
5456   return copy;
5457 }
5458 
5459 
5460 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5461 
5462 static tree
5463 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5464 {
5465   if (*tp == data)
5466     return (tree) data;
5467   else
5468     return NULL;
5469 }
5470 
5471 DEBUG_FUNCTION bool
5472 debug_find_tree (tree top, tree search)
5473 {
5474   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5475 }
5476 
5477 
5478 /* Declare the variables created by the inliner.  Add all the variables in
5479    VARS to BIND_EXPR.  */
5480 
5481 static void
5482 declare_inline_vars (tree block, tree vars)
5483 {
5484   tree t;
5485   for (t = vars; t; t = DECL_CHAIN (t))
5486     {
5487       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5488       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5489       add_local_decl (cfun, t);
5490     }
5491 
5492   if (block)
5493     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5494 }
5495 
5496 /* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
5497    but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
5498    VAR_DECL translation.  */
5499 
5500 tree
5501 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5502 {
5503   /* Don't generate debug information for the copy if we wouldn't have
5504      generated it for the copy either.  */
5505   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5506   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5507 
5508   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5509      declaration inspired this copy.  */
5510   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5511 
5512   /* The new variable/label has no RTL, yet.  */
5513   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5514       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5515     SET_DECL_RTL (copy, 0);
5516 
5517   /* These args would always appear unused, if not for this.  */
5518   TREE_USED (copy) = 1;
5519 
5520   /* Set the context for the new declaration.  */
5521   if (!DECL_CONTEXT (decl))
5522     /* Globals stay global.  */
5523     ;
5524   else if (DECL_CONTEXT (decl) != id->src_fn)
5525     /* Things that weren't in the scope of the function we're inlining
5526        from aren't in the scope we're inlining to, either.  */
5527     ;
5528   else if (TREE_STATIC (decl))
5529     /* Function-scoped static variables should stay in the original
5530        function.  */
5531     ;
5532   else
5533     {
5534       /* Ordinary automatic local variables are now in the scope of the
5535 	 new function.  */
5536       DECL_CONTEXT (copy) = id->dst_fn;
5537       if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5538 	{
5539 	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5540 	    DECL_ATTRIBUTES (copy)
5541 	      = tree_cons (get_identifier ("omp simt private"), NULL,
5542 			   DECL_ATTRIBUTES (copy));
5543 	  id->dst_simt_vars->safe_push (copy);
5544 	}
5545     }
5546 
5547   return copy;
5548 }
5549 
5550 static tree
5551 copy_decl_to_var (tree decl, copy_body_data *id)
5552 {
5553   tree copy, type;
5554 
5555   gcc_assert (TREE_CODE (decl) == PARM_DECL
5556 	      || TREE_CODE (decl) == RESULT_DECL);
5557 
5558   type = TREE_TYPE (decl);
5559 
5560   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5561 		     VAR_DECL, DECL_NAME (decl), type);
5562   if (DECL_PT_UID_SET_P (decl))
5563     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5564   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5565   TREE_READONLY (copy) = TREE_READONLY (decl);
5566   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5567   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5568 
5569   return copy_decl_for_dup_finish (id, decl, copy);
5570 }
5571 
5572 /* Like copy_decl_to_var, but create a return slot object instead of a
5573    pointer variable for return by invisible reference.  */
5574 
5575 static tree
5576 copy_result_decl_to_var (tree decl, copy_body_data *id)
5577 {
5578   tree copy, type;
5579 
5580   gcc_assert (TREE_CODE (decl) == PARM_DECL
5581 	      || TREE_CODE (decl) == RESULT_DECL);
5582 
5583   type = TREE_TYPE (decl);
5584   if (DECL_BY_REFERENCE (decl))
5585     type = TREE_TYPE (type);
5586 
5587   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5588 		     VAR_DECL, DECL_NAME (decl), type);
5589   if (DECL_PT_UID_SET_P (decl))
5590     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5591   TREE_READONLY (copy) = TREE_READONLY (decl);
5592   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5593   if (!DECL_BY_REFERENCE (decl))
5594     {
5595       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5596       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5597     }
5598 
5599   return copy_decl_for_dup_finish (id, decl, copy);
5600 }
5601 
5602 tree
5603 copy_decl_no_change (tree decl, copy_body_data *id)
5604 {
5605   tree copy;
5606 
5607   copy = copy_node (decl);
5608 
5609   /* The COPY is not abstract; it will be generated in DST_FN.  */
5610   DECL_ABSTRACT_P (copy) = false;
5611   lang_hooks.dup_lang_specific_decl (copy);
5612 
5613   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5614      been taken; it's for internal bookkeeping in expand_goto_internal.  */
5615   if (TREE_CODE (copy) == LABEL_DECL)
5616     {
5617       TREE_ADDRESSABLE (copy) = 0;
5618       LABEL_DECL_UID (copy) = -1;
5619     }
5620 
5621   return copy_decl_for_dup_finish (id, decl, copy);
5622 }
5623 
5624 static tree
5625 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5626 {
5627   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5628     return copy_decl_to_var (decl, id);
5629   else
5630     return copy_decl_no_change (decl, id);
5631 }
5632 
5633 /* Return a copy of the function's argument tree.  */
5634 static tree
5635 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5636 			       bitmap args_to_skip, tree *vars)
5637 {
5638   tree arg, *parg;
5639   tree new_parm = NULL;
5640   int i = 0;
5641 
5642   parg = &new_parm;
5643 
5644   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5645     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5646       {
5647         tree new_tree = remap_decl (arg, id);
5648 	if (TREE_CODE (new_tree) != PARM_DECL)
5649 	  new_tree = id->copy_decl (arg, id);
5650         lang_hooks.dup_lang_specific_decl (new_tree);
5651         *parg = new_tree;
5652 	parg = &DECL_CHAIN (new_tree);
5653       }
5654     else if (!id->decl_map->get (arg))
5655       {
5656 	/* Make an equivalent VAR_DECL.  If the argument was used
5657 	   as temporary variable later in function, the uses will be
5658 	   replaced by local variable.  */
5659 	tree var = copy_decl_to_var (arg, id);
5660 	insert_decl_map (id, arg, var);
5661         /* Declare this new variable.  */
5662         DECL_CHAIN (var) = *vars;
5663         *vars = var;
5664       }
5665   return new_parm;
5666 }
5667 
5668 /* Return a copy of the function's static chain.  */
5669 static tree
5670 copy_static_chain (tree static_chain, copy_body_data * id)
5671 {
5672   tree *chain_copy, *pvar;
5673 
5674   chain_copy = &static_chain;
5675   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5676     {
5677       tree new_tree = remap_decl (*pvar, id);
5678       lang_hooks.dup_lang_specific_decl (new_tree);
5679       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5680       *pvar = new_tree;
5681     }
5682   return static_chain;
5683 }
5684 
5685 /* Return true if the function is allowed to be versioned.
5686    This is a guard for the versioning functionality.  */
5687 
5688 bool
5689 tree_versionable_function_p (tree fndecl)
5690 {
5691   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5692 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5693 }
5694 
5695 /* Delete all unreachable basic blocks and update callgraph.
5696    Doing so is somewhat nontrivial because we need to update all clones and
5697    remove inline function that become unreachable.  */
5698 
5699 static bool
5700 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5701 {
5702   bool changed = false;
5703   basic_block b, next_bb;
5704 
5705   find_unreachable_blocks ();
5706 
5707   /* Delete all unreachable basic blocks.  */
5708 
5709   for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5710        != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5711     {
5712       next_bb = b->next_bb;
5713 
5714       if (!(b->flags & BB_REACHABLE))
5715 	{
5716           gimple_stmt_iterator bsi;
5717 
5718           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5719 	    {
5720 	      struct cgraph_edge *e;
5721 	      struct cgraph_node *node;
5722 
5723 	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5724 
5725 	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5726 		  &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5727 		{
5728 		  if (!e->inline_failed)
5729 		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
5730 		  else
5731 		    e->remove ();
5732 		}
5733 	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5734 		  && id->dst_node->clones)
5735 		for (node = id->dst_node->clones; node != id->dst_node;)
5736 		  {
5737 		    node->remove_stmt_references (gsi_stmt (bsi));
5738 		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5739 			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5740 		      {
5741 			if (!e->inline_failed)
5742 			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
5743 			else
5744 			  e->remove ();
5745 		      }
5746 
5747 		    if (node->clones)
5748 		      node = node->clones;
5749 		    else if (node->next_sibling_clone)
5750 		      node = node->next_sibling_clone;
5751 		    else
5752 		      {
5753 			while (node != id->dst_node && !node->next_sibling_clone)
5754 			  node = node->clone_of;
5755 			if (node != id->dst_node)
5756 			  node = node->next_sibling_clone;
5757 		      }
5758 		  }
5759 	    }
5760 	  delete_basic_block (b);
5761 	  changed = true;
5762 	}
5763     }
5764 
5765   return changed;
5766 }
5767 
5768 /* Update clone info after duplication.  */
5769 
5770 static void
5771 update_clone_info (copy_body_data * id)
5772 {
5773   struct cgraph_node *node;
5774   if (!id->dst_node->clones)
5775     return;
5776   for (node = id->dst_node->clones; node != id->dst_node;)
5777     {
5778       /* First update replace maps to match the new body.  */
5779       if (node->clone.tree_map)
5780         {
5781 	  unsigned int i;
5782           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5783 	    {
5784 	      struct ipa_replace_map *replace_info;
5785 	      replace_info = (*node->clone.tree_map)[i];
5786 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5787 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5788 	    }
5789 	}
5790       if (node->clones)
5791 	node = node->clones;
5792       else if (node->next_sibling_clone)
5793 	node = node->next_sibling_clone;
5794       else
5795 	{
5796 	  while (node != id->dst_node && !node->next_sibling_clone)
5797 	    node = node->clone_of;
5798 	  if (node != id->dst_node)
5799 	    node = node->next_sibling_clone;
5800 	}
5801     }
5802 }
5803 
5804 /* Create a copy of a function's tree.
5805    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5806    of the original function and the new copied function
5807    respectively.  In case we want to replace a DECL
5808    tree with another tree while duplicating the function's
5809    body, TREE_MAP represents the mapping between these
5810    trees. If UPDATE_CLONES is set, the call_stmt fields
5811    of edges of clones of the function will be updated.
5812 
5813    If non-NULL ARGS_TO_SKIP determine function parameters to remove
5814    from new version.
5815    If SKIP_RETURN is true, the new version will return void.
5816    If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
5817    If non_NULL NEW_ENTRY determine new entry BB of the clone.
5818 */
5819 void
5820 tree_function_versioning (tree old_decl, tree new_decl,
5821 			  vec<ipa_replace_map *, va_gc> *tree_map,
5822 			  bool update_clones, bitmap args_to_skip,
5823 			  bool skip_return, bitmap blocks_to_copy,
5824 			  basic_block new_entry)
5825 {
5826   struct cgraph_node *old_version_node;
5827   struct cgraph_node *new_version_node;
5828   copy_body_data id;
5829   tree p;
5830   unsigned i;
5831   struct ipa_replace_map *replace_info;
5832   basic_block old_entry_block, bb;
5833   auto_vec<gimple *, 10> init_stmts;
5834   tree vars = NULL_TREE;
5835   bitmap debug_args_to_skip = args_to_skip;
5836 
5837   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5838 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5839   DECL_POSSIBLY_INLINED (old_decl) = 1;
5840 
5841   old_version_node = cgraph_node::get (old_decl);
5842   gcc_checking_assert (old_version_node);
5843   new_version_node = cgraph_node::get (new_decl);
5844   gcc_checking_assert (new_version_node);
5845 
5846   /* Copy over debug args.  */
5847   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5848     {
5849       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5850       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5851       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5852       old_debug_args = decl_debug_args_lookup (old_decl);
5853       if (old_debug_args)
5854 	{
5855 	  new_debug_args = decl_debug_args_insert (new_decl);
5856 	  *new_debug_args = vec_safe_copy (*old_debug_args);
5857 	}
5858     }
5859 
5860   /* Output the inlining info for this abstract function, since it has been
5861      inlined.  If we don't do this now, we can lose the information about the
5862      variables in the function when the blocks get blown away as soon as we
5863      remove the cgraph node.  */
5864   (*debug_hooks->outlining_inline_function) (old_decl);
5865 
5866   DECL_ARTIFICIAL (new_decl) = 1;
5867   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5868   if (DECL_ORIGIN (old_decl) == old_decl)
5869     old_version_node->used_as_abstract_origin = true;
5870   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5871 
5872   /* Prepare the data structures for the tree copy.  */
5873   memset (&id, 0, sizeof (id));
5874 
5875   /* Generate a new name for the new version. */
5876   id.statements_to_fold = new hash_set<gimple *>;
5877 
5878   id.decl_map = new hash_map<tree, tree>;
5879   id.debug_map = NULL;
5880   id.src_fn = old_decl;
5881   id.dst_fn = new_decl;
5882   id.src_node = old_version_node;
5883   id.dst_node = new_version_node;
5884   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5885   id.blocks_to_copy = blocks_to_copy;
5886 
5887   id.copy_decl = copy_decl_no_change;
5888   id.transform_call_graph_edges
5889     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5890   id.transform_new_cfg = true;
5891   id.transform_return_to_modify = false;
5892   id.transform_parameter = false;
5893   id.transform_lang_insert_block = NULL;
5894 
5895   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5896     (DECL_STRUCT_FUNCTION (old_decl));
5897   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5898   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5899   initialize_cfun (new_decl, old_decl,
5900 		   new_entry ? new_entry->count : old_entry_block->count);
5901   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5902     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5903       = id.src_cfun->gimple_df->ipa_pta;
5904 
5905   /* Copy the function's static chain.  */
5906   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5907   if (p)
5908     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5909       = copy_static_chain (p, &id);
5910 
5911   /* If there's a tree_map, prepare for substitution.  */
5912   if (tree_map)
5913     for (i = 0; i < tree_map->length (); i++)
5914       {
5915 	gimple *init;
5916 	replace_info = (*tree_map)[i];
5917 	if (replace_info->replace_p)
5918 	  {
5919 	    int parm_num = -1;
5920 	    if (!replace_info->old_tree)
5921 	      {
5922 		int p = replace_info->parm_num;
5923 		tree parm;
5924 		tree req_type, new_type;
5925 
5926 		for (parm = DECL_ARGUMENTS (old_decl); p;
5927 		     parm = DECL_CHAIN (parm))
5928 		  p--;
5929 		replace_info->old_tree = parm;
5930 		parm_num = replace_info->parm_num;
5931 		req_type = TREE_TYPE (parm);
5932 		new_type = TREE_TYPE (replace_info->new_tree);
5933 		if (!useless_type_conversion_p (req_type, new_type))
5934 		  {
5935 		    if (fold_convertible_p (req_type, replace_info->new_tree))
5936 		      replace_info->new_tree
5937 			= fold_build1 (NOP_EXPR, req_type,
5938 				       replace_info->new_tree);
5939 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5940 		      replace_info->new_tree
5941 			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
5942 				       replace_info->new_tree);
5943 		    else
5944 		      {
5945 			if (dump_file)
5946 			  {
5947 			    fprintf (dump_file, "    const ");
5948 			    print_generic_expr (dump_file,
5949 						replace_info->new_tree);
5950 			    fprintf (dump_file,
5951 				     "  can't be converted to param ");
5952 			    print_generic_expr (dump_file, parm);
5953 			    fprintf (dump_file, "\n");
5954 			  }
5955 			replace_info->old_tree = NULL;
5956 		      }
5957 		  }
5958 	      }
5959 	    else
5960 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5961 	    if (replace_info->old_tree)
5962 	      {
5963 		init = setup_one_parameter (&id, replace_info->old_tree,
5964 					    replace_info->new_tree, id.src_fn,
5965 					    NULL,
5966 					    &vars);
5967 		if (init)
5968 		  init_stmts.safe_push (init);
5969 		if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5970 		  {
5971 		    if (parm_num == -1)
5972 		      {
5973 			tree parm;
5974 			int p;
5975 			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5976 			     parm = DECL_CHAIN (parm), p++)
5977 			  if (parm == replace_info->old_tree)
5978 			    {
5979 			      parm_num = p;
5980 			      break;
5981 			    }
5982 		      }
5983 		    if (parm_num != -1)
5984 		      {
5985 			if (debug_args_to_skip == args_to_skip)
5986 			  {
5987 			    debug_args_to_skip = BITMAP_ALLOC (NULL);
5988 			    bitmap_copy (debug_args_to_skip, args_to_skip);
5989 			  }
5990 			bitmap_clear_bit (debug_args_to_skip, parm_num);
5991 		      }
5992 		  }
5993 	      }
5994 	  }
5995       }
5996   /* Copy the function's arguments.  */
5997   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5998     DECL_ARGUMENTS (new_decl)
5999       = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6000 				       args_to_skip, &vars);
6001 
6002   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6003   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6004 
6005   declare_inline_vars (DECL_INITIAL (new_decl), vars);
6006 
6007   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6008     /* Add local vars.  */
6009     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6010 
6011   if (DECL_RESULT (old_decl) == NULL_TREE)
6012     ;
6013   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6014     {
6015       DECL_RESULT (new_decl)
6016 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6017 		      RESULT_DECL, NULL_TREE, void_type_node);
6018       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6019       cfun->returns_struct = 0;
6020       cfun->returns_pcc_struct = 0;
6021     }
6022   else
6023     {
6024       tree old_name;
6025       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6026       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6027       if (gimple_in_ssa_p (id.src_cfun)
6028 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6029 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6030 	{
6031 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6032 	  insert_decl_map (&id, old_name, new_name);
6033 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6034 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6035 	}
6036     }
6037 
6038   /* Set up the destination functions loop tree.  */
6039   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6040     {
6041       cfun->curr_properties &= ~PROP_loops;
6042       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6043       cfun->curr_properties |= PROP_loops;
6044     }
6045 
6046   /* Copy the Function's body.  */
6047   copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6048 	     new_entry);
6049 
6050   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6051   number_blocks (new_decl);
6052 
6053   /* We want to create the BB unconditionally, so that the addition of
6054      debug stmts doesn't affect BB count, which may in the end cause
6055      codegen differences.  */
6056   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6057   while (init_stmts.length ())
6058     insert_init_stmt (&id, bb, init_stmts.pop ());
6059   update_clone_info (&id);
6060 
6061   /* Remap the nonlocal_goto_save_area, if any.  */
6062   if (cfun->nonlocal_goto_save_area)
6063     {
6064       struct walk_stmt_info wi;
6065 
6066       memset (&wi, 0, sizeof (wi));
6067       wi.info = &id;
6068       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6069     }
6070 
6071   /* Clean up.  */
6072   delete id.decl_map;
6073   if (id.debug_map)
6074     delete id.debug_map;
6075   free_dominance_info (CDI_DOMINATORS);
6076   free_dominance_info (CDI_POST_DOMINATORS);
6077 
6078   update_max_bb_count ();
6079   fold_marked_statements (0, id.statements_to_fold);
6080   delete id.statements_to_fold;
6081   delete_unreachable_blocks_update_callgraph (&id);
6082   if (id.dst_node->definition)
6083     cgraph_edge::rebuild_references ();
6084   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6085     {
6086       calculate_dominance_info (CDI_DOMINATORS);
6087       fix_loop_structure (NULL);
6088     }
6089   update_ssa (TODO_update_ssa);
6090 
6091   /* After partial cloning we need to rescale frequencies, so they are
6092      within proper range in the cloned function.  */
6093   if (new_entry)
6094     {
6095       struct cgraph_edge *e;
6096       rebuild_frequencies ();
6097 
6098       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6099       for (e = new_version_node->callees; e; e = e->next_callee)
6100 	{
6101 	  basic_block bb = gimple_bb (e->call_stmt);
6102 	  e->count = bb->count;
6103 	}
6104       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6105 	{
6106 	  basic_block bb = gimple_bb (e->call_stmt);
6107 	  e->count = bb->count;
6108 	}
6109     }
6110 
6111   if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6112     {
6113       tree parm;
6114       vec<tree, va_gc> **debug_args = NULL;
6115       unsigned int len = 0;
6116       for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6117 	   parm; parm = DECL_CHAIN (parm), i++)
6118 	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6119 	  {
6120 	    tree ddecl;
6121 
6122 	    if (debug_args == NULL)
6123 	      {
6124 		debug_args = decl_debug_args_insert (new_decl);
6125 		len = vec_safe_length (*debug_args);
6126 	      }
6127 	    ddecl = make_node (DEBUG_EXPR_DECL);
6128 	    DECL_ARTIFICIAL (ddecl) = 1;
6129 	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
6130 	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
6131 	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6132 	    vec_safe_push (*debug_args, ddecl);
6133 	  }
6134       if (debug_args != NULL)
6135 	{
6136 	  /* On the callee side, add
6137 	     DEBUG D#Y s=> parm
6138 	     DEBUG var => D#Y
6139 	     stmts to the first bb where var is a VAR_DECL created for the
6140 	     optimized away parameter in DECL_INITIAL block.  This hints
6141 	     in the debug info that var (whole DECL_ORIGIN is the parm
6142 	     PARM_DECL) is optimized away, but could be looked up at the
6143 	     call site as value of D#X there.  */
6144 	  tree var = vars, vexpr;
6145 	  gimple_stmt_iterator cgsi
6146 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6147 	  gimple *def_temp;
6148 	  var = vars;
6149 	  i = vec_safe_length (*debug_args);
6150 	  do
6151 	    {
6152 	      i -= 2;
6153 	      while (var != NULL_TREE
6154 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6155 		var = TREE_CHAIN (var);
6156 	      if (var == NULL_TREE)
6157 		break;
6158 	      vexpr = make_node (DEBUG_EXPR_DECL);
6159 	      parm = (**debug_args)[i];
6160 	      DECL_ARTIFICIAL (vexpr) = 1;
6161 	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
6162 	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
6163 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6164 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6165 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6166 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6167 	    }
6168 	  while (i > len);
6169 	}
6170     }
6171 
6172   if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6173     BITMAP_FREE (debug_args_to_skip);
6174   free_dominance_info (CDI_DOMINATORS);
6175   free_dominance_info (CDI_POST_DOMINATORS);
6176 
6177   gcc_assert (!id.debug_stmts.exists ());
6178   pop_cfun ();
6179   return;
6180 }
6181 
6182 /* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6183    the callee and return the inlined body on success.  */
6184 
6185 tree
6186 maybe_inline_call_in_expr (tree exp)
6187 {
6188   tree fn = get_callee_fndecl (exp);
6189 
6190   /* We can only try to inline "const" functions.  */
6191   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6192     {
6193       call_expr_arg_iterator iter;
6194       copy_body_data id;
6195       tree param, arg, t;
6196       hash_map<tree, tree> decl_map;
6197 
6198       /* Remap the parameters.  */
6199       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6200 	   param;
6201 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6202 	decl_map.put (param, arg);
6203 
6204       memset (&id, 0, sizeof (id));
6205       id.src_fn = fn;
6206       id.dst_fn = current_function_decl;
6207       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6208       id.decl_map = &decl_map;
6209 
6210       id.copy_decl = copy_decl_no_change;
6211       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6212       id.transform_new_cfg = false;
6213       id.transform_return_to_modify = true;
6214       id.transform_parameter = true;
6215       id.transform_lang_insert_block = NULL;
6216 
6217       /* Make sure not to unshare trees behind the front-end's back
6218 	 since front-end specific mechanisms may rely on sharing.  */
6219       id.regimplify = false;
6220       id.do_not_unshare = true;
6221 
6222       /* We're not inside any EH region.  */
6223       id.eh_lp_nr = 0;
6224 
6225       t = copy_tree_body (&id);
6226 
6227       /* We can only return something suitable for use in a GENERIC
6228 	 expression tree.  */
6229       if (TREE_CODE (t) == MODIFY_EXPR)
6230 	return TREE_OPERAND (t, 1);
6231     }
6232 
6233    return NULL_TREE;
6234 }
6235 
6236 /* Duplicate a type, fields and all.  */
6237 
6238 tree
6239 build_duplicate_type (tree type)
6240 {
6241   struct copy_body_data id;
6242 
6243   memset (&id, 0, sizeof (id));
6244   id.src_fn = current_function_decl;
6245   id.dst_fn = current_function_decl;
6246   id.src_cfun = cfun;
6247   id.decl_map = new hash_map<tree, tree>;
6248   id.debug_map = NULL;
6249   id.copy_decl = copy_decl_no_change;
6250 
6251   type = remap_type_1 (type, &id);
6252 
6253   delete id.decl_map;
6254   if (id.debug_map)
6255     delete id.debug_map;
6256 
6257   TYPE_CANONICAL (type) = type;
6258 
6259   return type;
6260 }
6261 
6262 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6263    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6264    evaluation.  */
6265 
6266 tree
6267 copy_fn (tree fn, tree& parms, tree& result)
6268 {
6269   copy_body_data id;
6270   tree param;
6271   hash_map<tree, tree> decl_map;
6272 
6273   tree *p = &parms;
6274   *p = NULL_TREE;
6275 
6276   memset (&id, 0, sizeof (id));
6277   id.src_fn = fn;
6278   id.dst_fn = current_function_decl;
6279   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6280   id.decl_map = &decl_map;
6281 
6282   id.copy_decl = copy_decl_no_change;
6283   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6284   id.transform_new_cfg = false;
6285   id.transform_return_to_modify = false;
6286   id.transform_parameter = true;
6287   id.transform_lang_insert_block = NULL;
6288 
6289   /* Make sure not to unshare trees behind the front-end's back
6290      since front-end specific mechanisms may rely on sharing.  */
6291   id.regimplify = false;
6292   id.do_not_unshare = true;
6293 
6294   /* We're not inside any EH region.  */
6295   id.eh_lp_nr = 0;
6296 
6297   /* Remap the parameters and result and return them to the caller.  */
6298   for (param = DECL_ARGUMENTS (fn);
6299        param;
6300        param = DECL_CHAIN (param))
6301     {
6302       *p = remap_decl (param, &id);
6303       p = &DECL_CHAIN (*p);
6304     }
6305 
6306   if (DECL_RESULT (fn))
6307     result = remap_decl (DECL_RESULT (fn), &id);
6308   else
6309     result = NULL_TREE;
6310 
6311   return copy_tree_body (&id);
6312 }
6313