xref: /dragonfly/contrib/gcc-8.0/gcc/tree-inline.c (revision e215fc28)
/* Tree inlining.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
2138fd1498Szrj #include "config.h"
2238fd1498Szrj #include "system.h"
2338fd1498Szrj #include "coretypes.h"
2438fd1498Szrj #include "backend.h"
2538fd1498Szrj #include "target.h"
2638fd1498Szrj #include "rtl.h"
2738fd1498Szrj #include "tree.h"
2838fd1498Szrj #include "gimple.h"
2938fd1498Szrj #include "cfghooks.h"
3038fd1498Szrj #include "tree-pass.h"
3138fd1498Szrj #include "ssa.h"
3238fd1498Szrj #include "cgraph.h"
3338fd1498Szrj #include "tree-pretty-print.h"
3438fd1498Szrj #include "diagnostic-core.h"
3538fd1498Szrj #include "gimple-predict.h"
3638fd1498Szrj #include "fold-const.h"
3738fd1498Szrj #include "stor-layout.h"
3838fd1498Szrj #include "calls.h"
3938fd1498Szrj #include "tree-inline.h"
4038fd1498Szrj #include "langhooks.h"
4138fd1498Szrj #include "cfganal.h"
4238fd1498Szrj #include "tree-iterator.h"
4338fd1498Szrj #include "intl.h"
4438fd1498Szrj #include "gimple-fold.h"
4538fd1498Szrj #include "tree-eh.h"
4638fd1498Szrj #include "gimplify.h"
4738fd1498Szrj #include "gimple-iterator.h"
4838fd1498Szrj #include "gimplify-me.h"
4938fd1498Szrj #include "gimple-walk.h"
5038fd1498Szrj #include "tree-cfg.h"
5138fd1498Szrj #include "tree-into-ssa.h"
5238fd1498Szrj #include "tree-dfa.h"
5338fd1498Szrj #include "tree-ssa.h"
5438fd1498Szrj #include "except.h"
5538fd1498Szrj #include "debug.h"
5638fd1498Szrj #include "params.h"
5738fd1498Szrj #include "value-prof.h"
5838fd1498Szrj #include "cfgloop.h"
5938fd1498Szrj #include "builtins.h"
6038fd1498Szrj #include "tree-chkp.h"
6138fd1498Szrj #include "stringpool.h"
6238fd1498Szrj #include "attribs.h"
6338fd1498Szrj #include "sreal.h"
6438fd1498Szrj 
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements are adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function rather than into blocks of an existing function as with
   inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
11238fd1498Szrj 
/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes for the static helpers defined later in this file.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree,
				     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
13838fd1498Szrj 
13938fd1498Szrj /* Insert a tree->tree mapping for ID.  Despite the name suggests
14038fd1498Szrj    that the trees should be variables, it is used for more than that.  */
14138fd1498Szrj 
14238fd1498Szrj void
insert_decl_map(copy_body_data * id,tree key,tree value)14338fd1498Szrj insert_decl_map (copy_body_data *id, tree key, tree value)
14438fd1498Szrj {
14538fd1498Szrj   id->decl_map->put (key, value);
14638fd1498Szrj 
14738fd1498Szrj   /* Always insert an identity map as well.  If we see this same new
14838fd1498Szrj      node again, we won't want to duplicate it a second time.  */
14938fd1498Szrj   if (key != value)
15038fd1498Szrj     id->decl_map->put (value, value);
15138fd1498Szrj }
15238fd1498Szrj 
15338fd1498Szrj /* Insert a tree->tree mapping for ID.  This is only used for
15438fd1498Szrj    variables.  */
15538fd1498Szrj 
15638fd1498Szrj static void
insert_debug_decl_map(copy_body_data * id,tree key,tree value)15738fd1498Szrj insert_debug_decl_map (copy_body_data *id, tree key, tree value)
15838fd1498Szrj {
15938fd1498Szrj   if (!gimple_in_ssa_p (id->src_cfun))
16038fd1498Szrj     return;
16138fd1498Szrj 
16238fd1498Szrj   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
16338fd1498Szrj     return;
16438fd1498Szrj 
16538fd1498Szrj   if (!target_for_debug_bind (key))
16638fd1498Szrj     return;
16738fd1498Szrj 
16838fd1498Szrj   gcc_assert (TREE_CODE (key) == PARM_DECL);
16938fd1498Szrj   gcc_assert (VAR_P (value));
17038fd1498Szrj 
17138fd1498Szrj   if (!id->debug_map)
17238fd1498Szrj     id->debug_map = new hash_map<tree, tree>;
17338fd1498Szrj 
17438fd1498Szrj   id->debug_map->put (key, value);
17538fd1498Szrj }
17638fd1498Szrj 
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context; the remapping routines below set this to -1 and the
   caller is expected to reset the debug statement.  */
static int processing_debug_stmt = 0;
18238fd1498Szrj 
/* Construct new SSA name for old NAME.  ID is the inline context.
   Returns the replacement tree: usually a fresh SSA name registered in
   ID->decl_map, but for debug statements possibly a DEBUG_EXPR_DECL or
   NAME itself (with processing_debug_stmt set to -1 on failure).  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  /* Reuse an existing mapping if we have one.  */
  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      /* For a not-yet-remapped default definition of a PARM_DECL seen
	 inside a debug stmt, emit a debug source bind at the start of
	 the function so the value stays available to debug info.  Only
	 possible when inlining into the entry block's single
	 successor.  */
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = id->decl_map->get (val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      /* The parameter was remapped to something else; give up
		 and let the caller reset the debug stmt.  */
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && VAR_P (var)
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet. We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by variable during
     inlining:  this saves us from need to introduce PHI node in a case
     return value is just partly initialized.  */
  if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining function having uninitialized variable, we might
	     extend the lifetime (variable might get reused).  This cause
	     ICE in the case we end up extending lifetime of SSA name across
	     abnormal edge, but also increase register pressure.

	     We simply initialize all uninitialized vars by 0 except
	     for case we are inlining to very first BB.  We can avoid
	     this for all BBs that are not inside strongly connected
	     regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
					     0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple *init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      /* Keep it a default definition of the copied variable.  */
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
32838fd1498Szrj 
/* Remap DECL during the copying of the BLOCK tree for the function.
   Returns the previously recorded replacement if one exists; otherwise
   creates one via ID->copy_decl, registers it, and remaps its type and
   size expressions.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  /* Inside a debug stmt an unmapped decl is an error condition; flag it
     and return the decl unchanged.  */
  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     necessary DECLs have already been remapped and we do not want to duplicate
     a decl coming from outside of the sequence we are copying.  */
  if (!n
      && id->prevent_decl_creation_for_types
      && id->remapping_type_depth > 0
      && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
    return decl;

  /* If we didn't already have an equivalent for this declaration, create one
     now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	{
	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
	     is not set on the TYPE_DECL, for example in LTO mode.  */
	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
	    {
	      tree x = build_variant_type_copy (TREE_TYPE (t));
	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
	      DECL_ORIGINAL_TYPE (t) = x;
	    }
	}

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
41038fd1498Szrj 
/* Worker for remap_type.  TYPE is known to need an actual copy (it is
   variably modified in ID->src_fn and not yet in the decl map); build
   the copy, register it, and remap any components that may reference
   local variables.  */

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					 TYPE_MODE (type),
					 TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
					    TYPE_MODE (type),
					    TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  /* Register the copy before recursing: components below may refer back
     to TYPE.  */
  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing in between type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  t = TYPE_MIN_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

	  t = TYPE_MAX_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
	}
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
        walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
	{
	  tree f, nf = NULL;

	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	    {
	      t = remap_decl (f, id);
	      DECL_CONTEXT (t) = new_tree;
	      DECL_CHAIN (t) = nf;
	      nf = t;
	    }
	  TYPE_FIELDS (new_tree) = nreverse (nf);
	}
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already remapped data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      tree s = TYPE_SIZE (type);
      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
      tree su = TYPE_SIZE_UNIT (type);
      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
			   || s == mvs);
      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
			   || su == mvsu);
      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}
58038fd1498Szrj 
58138fd1498Szrj tree
remap_type(tree type,copy_body_data * id)58238fd1498Szrj remap_type (tree type, copy_body_data *id)
58338fd1498Szrj {
58438fd1498Szrj   tree *node;
58538fd1498Szrj   tree tmp;
58638fd1498Szrj 
58738fd1498Szrj   if (type == NULL)
58838fd1498Szrj     return type;
58938fd1498Szrj 
59038fd1498Szrj   /* See if we have remapped this type.  */
59138fd1498Szrj   node = id->decl_map->get (type);
59238fd1498Szrj   if (node)
59338fd1498Szrj     return *node;
59438fd1498Szrj 
59538fd1498Szrj   /* The type only needs remapping if it's variably modified.  */
59638fd1498Szrj   if (! variably_modified_type_p (type, id->src_fn))
59738fd1498Szrj     {
59838fd1498Szrj       insert_decl_map (id, type, type);
59938fd1498Szrj       return type;
60038fd1498Szrj     }
60138fd1498Szrj 
60238fd1498Szrj   id->remapping_type_depth++;
60338fd1498Szrj   tmp = remap_type_1 (type, id);
60438fd1498Szrj   id->remapping_type_depth--;
60538fd1498Szrj 
60638fd1498Szrj   return tmp;
60738fd1498Szrj }
60838fd1498Szrj 
60938fd1498Szrj /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
61038fd1498Szrj 
61138fd1498Szrj static bool
can_be_nonlocal(tree decl,copy_body_data * id)61238fd1498Szrj can_be_nonlocal (tree decl, copy_body_data *id)
61338fd1498Szrj {
61438fd1498Szrj   /* We can not duplicate function decls.  */
61538fd1498Szrj   if (TREE_CODE (decl) == FUNCTION_DECL)
61638fd1498Szrj     return true;
61738fd1498Szrj 
61838fd1498Szrj   /* Local static vars must be non-local or we get multiple declaration
61938fd1498Szrj      problems.  */
62038fd1498Szrj   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
62138fd1498Szrj     return true;
62238fd1498Szrj 
62338fd1498Szrj   return false;
62438fd1498Szrj }
62538fd1498Szrj 
/* Remap the chain of declarations DECLS for the inline context ID and
   return the new chain.  Declarations that stay non-local (see
   can_be_nonlocal) are not copied; when NONLOCALIZED_LIST is non-NULL
   they may be pushed onto it so debug information can still refer to
   them.  */

static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
	    add_local_decl (cfun, old_var);
	  /* Record it for debug info unless it would be ignored anyway.  */
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
        {
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  /* Prepend to the new chain; the whole chain is reversed at the
	     end to restore the original order.  */
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}
68938fd1498Szrj 
69038fd1498Szrj /* Copy the BLOCK to contain remapped versions of the variables
69138fd1498Szrj    therein.  And hook the new block into the block-tree.  */
69238fd1498Szrj 
69338fd1498Szrj static void
remap_block(tree * block,copy_body_data * id)69438fd1498Szrj remap_block (tree *block, copy_body_data *id)
69538fd1498Szrj {
69638fd1498Szrj   tree old_block;
69738fd1498Szrj   tree new_block;
69838fd1498Szrj 
69938fd1498Szrj   /* Make the new block.  */
70038fd1498Szrj   old_block = *block;
70138fd1498Szrj   new_block = make_node (BLOCK);
70238fd1498Szrj   TREE_USED (new_block) = TREE_USED (old_block);
70338fd1498Szrj   BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
70438fd1498Szrj   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
70538fd1498Szrj   BLOCK_NONLOCALIZED_VARS (new_block)
70638fd1498Szrj     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
70738fd1498Szrj   *block = new_block;
70838fd1498Szrj 
70938fd1498Szrj   /* Remap its variables.  */
71038fd1498Szrj   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
71138fd1498Szrj   					&BLOCK_NONLOCALIZED_VARS (new_block),
71238fd1498Szrj 					id);
71338fd1498Szrj 
71438fd1498Szrj   if (id->transform_lang_insert_block)
71538fd1498Szrj     id->transform_lang_insert_block (new_block);
71638fd1498Szrj 
71738fd1498Szrj   /* Remember the remapped block.  */
71838fd1498Szrj   insert_decl_map (id, old_block, new_block);
71938fd1498Szrj }
72038fd1498Szrj 
72138fd1498Szrj /* Copy the whole block tree and root it in id->block.  */
72238fd1498Szrj static tree
remap_blocks(tree block,copy_body_data * id)72338fd1498Szrj remap_blocks (tree block, copy_body_data *id)
72438fd1498Szrj {
72538fd1498Szrj   tree t;
72638fd1498Szrj   tree new_tree = block;
72738fd1498Szrj 
72838fd1498Szrj   if (!block)
72938fd1498Szrj     return NULL;
73038fd1498Szrj 
73138fd1498Szrj   remap_block (&new_tree, id);
73238fd1498Szrj   gcc_assert (new_tree != block);
73338fd1498Szrj   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
73438fd1498Szrj     prepend_lexical_block (new_tree, remap_blocks (t, id));
73538fd1498Szrj   /* Blocks are in arbitrary order, but make things slightly prettier and do
73638fd1498Szrj      not swap order when producing a copy.  */
73738fd1498Szrj   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
73838fd1498Szrj   return new_tree;
73938fd1498Szrj }
74038fd1498Szrj 
74138fd1498Szrj /* Remap the block tree rooted at BLOCK to nothing.  */
74238fd1498Szrj static void
remap_blocks_to_null(tree block,copy_body_data * id)74338fd1498Szrj remap_blocks_to_null (tree block, copy_body_data *id)
74438fd1498Szrj {
74538fd1498Szrj   tree t;
74638fd1498Szrj   insert_decl_map (id, block, NULL_TREE);
74738fd1498Szrj   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
74838fd1498Szrj     remap_blocks_to_null (t, id);
74938fd1498Szrj }
75038fd1498Szrj 
75138fd1498Szrj static void
copy_statement_list(tree * tp)75238fd1498Szrj copy_statement_list (tree *tp)
75338fd1498Szrj {
75438fd1498Szrj   tree_stmt_iterator oi, ni;
75538fd1498Szrj   tree new_tree;
75638fd1498Szrj 
75738fd1498Szrj   new_tree = alloc_stmt_list ();
75838fd1498Szrj   ni = tsi_start (new_tree);
75938fd1498Szrj   oi = tsi_start (*tp);
76038fd1498Szrj   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
76138fd1498Szrj   *tp = new_tree;
76238fd1498Szrj 
76338fd1498Szrj   for (; !tsi_end_p (oi); tsi_next (&oi))
76438fd1498Szrj     {
76538fd1498Szrj       tree stmt = tsi_stmt (oi);
76638fd1498Szrj       if (TREE_CODE (stmt) == STATEMENT_LIST)
76738fd1498Szrj 	/* This copy is not redundant; tsi_link_after will smash this
76838fd1498Szrj 	   STATEMENT_LIST into the end of the one we're building, and we
76938fd1498Szrj 	   don't want to do that with the original.  */
77038fd1498Szrj 	copy_statement_list (&stmt);
77138fd1498Szrj       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
77238fd1498Szrj     }
77338fd1498Szrj }
77438fd1498Szrj 
77538fd1498Szrj static void
copy_bind_expr(tree * tp,int * walk_subtrees,copy_body_data * id)77638fd1498Szrj copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
77738fd1498Szrj {
77838fd1498Szrj   tree block = BIND_EXPR_BLOCK (*tp);
77938fd1498Szrj   /* Copy (and replace) the statement.  */
78038fd1498Szrj   copy_tree_r (tp, walk_subtrees, NULL);
78138fd1498Szrj   if (block)
78238fd1498Szrj     {
78338fd1498Szrj       remap_block (&block, id);
78438fd1498Szrj       BIND_EXPR_BLOCK (*tp) = block;
78538fd1498Szrj     }
78638fd1498Szrj 
78738fd1498Szrj   if (BIND_EXPR_VARS (*tp))
78838fd1498Szrj     /* This will remap a lot of the same decls again, but this should be
78938fd1498Szrj        harmless.  */
79038fd1498Szrj     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
79138fd1498Szrj }
79238fd1498Szrj 
79338fd1498Szrj 
79438fd1498Szrj /* Create a new gimple_seq by remapping all the statements in BODY
79538fd1498Szrj    using the inlining information in ID.  */
79638fd1498Szrj 
79738fd1498Szrj static gimple_seq
remap_gimple_seq(gimple_seq body,copy_body_data * id)79838fd1498Szrj remap_gimple_seq (gimple_seq body, copy_body_data *id)
79938fd1498Szrj {
80038fd1498Szrj   gimple_stmt_iterator si;
80138fd1498Szrj   gimple_seq new_body = NULL;
80238fd1498Szrj 
80338fd1498Szrj   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
80438fd1498Szrj     {
80538fd1498Szrj       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
80638fd1498Szrj       gimple_seq_add_seq (&new_body, new_stmts);
80738fd1498Szrj     }
80838fd1498Szrj 
80938fd1498Szrj   return new_body;
81038fd1498Szrj }
81138fd1498Szrj 
81238fd1498Szrj 
81338fd1498Szrj /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
81438fd1498Szrj    block using the mapping information in ID.  */
81538fd1498Szrj 
81638fd1498Szrj static gimple *
copy_gimple_bind(gbind * stmt,copy_body_data * id)81738fd1498Szrj copy_gimple_bind (gbind *stmt, copy_body_data *id)
81838fd1498Szrj {
81938fd1498Szrj   gimple *new_bind;
82038fd1498Szrj   tree new_block, new_vars;
82138fd1498Szrj   gimple_seq body, new_body;
82238fd1498Szrj 
82338fd1498Szrj   /* Copy the statement.  Note that we purposely don't use copy_stmt
82438fd1498Szrj      here because we need to remap statements as we copy.  */
82538fd1498Szrj   body = gimple_bind_body (stmt);
82638fd1498Szrj   new_body = remap_gimple_seq (body, id);
82738fd1498Szrj 
82838fd1498Szrj   new_block = gimple_bind_block (stmt);
82938fd1498Szrj   if (new_block)
83038fd1498Szrj     remap_block (&new_block, id);
83138fd1498Szrj 
83238fd1498Szrj   /* This will remap a lot of the same decls again, but this should be
83338fd1498Szrj      harmless.  */
83438fd1498Szrj   new_vars = gimple_bind_vars (stmt);
83538fd1498Szrj   if (new_vars)
83638fd1498Szrj     new_vars = remap_decls (new_vars, NULL, id);
83738fd1498Szrj 
83838fd1498Szrj   new_bind = gimple_build_bind (new_vars, new_body, new_block);
83938fd1498Szrj 
84038fd1498Szrj   return new_bind;
84138fd1498Szrj }
84238fd1498Szrj 
84338fd1498Szrj /* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */
84438fd1498Szrj 
84538fd1498Szrj static bool
is_parm(tree decl)84638fd1498Szrj is_parm (tree decl)
84738fd1498Szrj {
84838fd1498Szrj   if (TREE_CODE (decl) == SSA_NAME)
84938fd1498Szrj     {
85038fd1498Szrj       decl = SSA_NAME_VAR (decl);
85138fd1498Szrj       if (!decl)
85238fd1498Szrj 	return false;
85338fd1498Szrj     }
85438fd1498Szrj 
85538fd1498Szrj   return (TREE_CODE (decl) == PARM_DECL);
85638fd1498Szrj }
85738fd1498Szrj 
85838fd1498Szrj /* Remap the dependence CLIQUE from the source to the destination function
85938fd1498Szrj    as specified in ID.  */
86038fd1498Szrj 
86138fd1498Szrj static unsigned short
remap_dependence_clique(copy_body_data * id,unsigned short clique)86238fd1498Szrj remap_dependence_clique (copy_body_data *id, unsigned short clique)
86338fd1498Szrj {
86438fd1498Szrj   if (clique == 0 || processing_debug_stmt)
86538fd1498Szrj     return 0;
86638fd1498Szrj   if (!id->dependence_map)
86738fd1498Szrj     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
86838fd1498Szrj   bool existed;
86938fd1498Szrj   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
87038fd1498Szrj   if (!existed)
87138fd1498Szrj     newc = ++cfun->last_clique;
87238fd1498Szrj   return newc;
87338fd1498Szrj }
87438fd1498Szrj 
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      /* A remapped SSA name on the LHS gets the copied statement as
	 its defining statement.  */
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    /* STATEMENT_LISTs never appear in GIMPLE operands.  */
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    /* Nor do SAVE_EXPRs.  */
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  /* Rebuild the MEM_REF from the remapped pointer, then carry
	     the access flags over from the original node.  */
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      /* Default to the destination block unless the source block has
	 an explicit mapping recorded.  */
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = id->decl_map->get (TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
104738fd1498Szrj 
104838fd1498Szrj 
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  Remaps the tree at *TP into the destination
   function: decls, types, blocks and EH-relevant labels are replaced
   by their copies, RETURN_EXPRs are optionally rewritten, and
   inline-substitution artifacts (such as *&x) are cleaned up.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from out input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  /* NOTE(review): a non-NULL return value stops the walk; the
	     value 1 looks like a sentinel meaning "statement deleted" —
	     confirm against the walk_tree callers in this file.  */
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  /* Drop the no-op assignment and process the empty
		     statement through this function again.  */
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
	         it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
	         does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = gimple_fold_indirect_ref (ptr);
	      if (! *tp)
	        {
		  type = remap_type (type, id);
		  if (TREE_CODE (ptr) == ADDR_EXPR)
		    {
		      *tp
		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
	          else
		    {
		      /* Rebuild the dereference and carry over the
			 access flags from the original node.  */
	              *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
		        TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has block defined, map it to newly constructed block.
         When inlining we want EXPRs without block appear in the block
	 of function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = id->decl_map->get (TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    {
	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
	      *tp = t;
	    }
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
131938fd1498Szrj 
132038fd1498Szrj /* Helper for remap_gimple_stmt.  Given an EH region number for the
132138fd1498Szrj    source function, map that to the duplicate EH region number in
132238fd1498Szrj    the destination function.  */
132338fd1498Szrj 
132438fd1498Szrj static int
remap_eh_region_nr(int old_nr,copy_body_data * id)132538fd1498Szrj remap_eh_region_nr (int old_nr, copy_body_data *id)
132638fd1498Szrj {
132738fd1498Szrj   eh_region old_r, new_r;
132838fd1498Szrj 
132938fd1498Szrj   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
133038fd1498Szrj   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
133138fd1498Szrj 
133238fd1498Szrj   return new_r->index;
133338fd1498Szrj }
133438fd1498Szrj 
133538fd1498Szrj /* Similar, but operate on INTEGER_CSTs.  */
133638fd1498Szrj 
133738fd1498Szrj static tree
remap_eh_region_tree_nr(tree old_t_nr,copy_body_data * id)133838fd1498Szrj remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
133938fd1498Szrj {
134038fd1498Szrj   int old_nr, new_nr;
134138fd1498Szrj 
134238fd1498Szrj   old_nr = tree_to_shwi (old_t_nr);
134338fd1498Szrj   new_nr = remap_eh_region_nr (old_nr, id);
134438fd1498Szrj 
134538fd1498Szrj   return build_int_cst (integer_type_node, new_nr);
134638fd1498Szrj }
134738fd1498Szrj 
134838fd1498Szrj /* Helper for copy_bb.  Remap statement STMT using the inlining
134938fd1498Szrj    information in ID.  Return the new statement copy.  */
135038fd1498Szrj 
static gimple_seq
remap_gimple_stmt (gimple *stmt, copy_body_data *id)
{
  gimple *copy = NULL;
  struct walk_stmt_info wi;
  /* Set when the LHS of the generated copy has already been remapped
     and must not be walked again by remap_gimple_op_r below.  */
  bool skip_first = false;
  gimple_seq stmts = NULL;

  /* Drop debug statements the destination function cannot host:
     nonbind markers when it doesn't record them, debug binds when
     var-tracking assignments are disabled for it.  */
  if (is_gimple_debug (stmt)
      && (gimple_debug_nonbind_marker_p (stmt)
	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
    return stmts;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from out input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));
      /* MPX pointer-bounds instrumentation: returned bounds, and the
	 caller-side slot to store them into, if any.  */
      tree retbnd = gimple_return_retbnd (stmt);
      tree bndslot = id->retbnd;

      if (retbnd && bndslot)
	{
	  gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
	  memset (&wi, 0, sizeof (wi));
	  wi.info = id;
	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
	  gimple_seq_add_stmt (&stmts, bndcopy);
	}

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
	  copy = gimple_build_assign (id->do_not_unshare
				      ? id->retvar : unshare_expr (id->retvar),
				      retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;

	  /* We need to copy bounds if return structure with pointers into
	     instrumented function.  */
	  if (chkp_function_instrumented_p (id->dst_fn)
	      && !bndslot
	      && !BOUNDED_P (id->retvar)
	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
	    id->assign_stmts.safe_push (copy);

	}
      else
	return stmts;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  Each case remaps the embedded sequence(s)
	 recursively and rebuilds the container statement around them.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
	    copy = gimple_build_omp_parallel
	             (s1,
		      gimple_omp_parallel_clauses (omp_par_stmt),
		      gimple_omp_parallel_child_fn (omp_par_stmt),
		      gimple_omp_parallel_data_arg (omp_par_stmt));
	  }
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
	           (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
				       gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    /* Copy the per-dimension loop controls; the trees themselves
	       are remapped later by the operand walk at the bottom.  */
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_TASKGROUP:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_taskgroup (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered
		   (s1,
		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
	           (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
	           (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_TARGET:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_target
		   (s1, gimple_omp_target_kind (stmt),
		    gimple_omp_target_clauses (stmt));
	  break;

	case GIMPLE_OMP_TEAMS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_teams
		   (s1, gimple_omp_teams_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_critical (s1,
					    gimple_omp_critical_name
					      (as_a <gomp_critical *> (stmt)),
					    gimple_omp_critical_clauses
					      (as_a <gomp_critical *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
	    gtransaction *new_trans_stmt;
	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
				   id);
	    copy = new_trans_stmt = gimple_build_transaction (s1);
	    gimple_transaction_set_subcode (new_trans_stmt,
	      gimple_transaction_subcode (old_trans_stmt));
	    gimple_transaction_set_label_norm (new_trans_stmt,
	      gimple_transaction_label_norm (old_trans_stmt));
	    gimple_transaction_set_label_uninst (new_trans_stmt,
	      gimple_transaction_label_uninst (old_trans_stmt));
	    gimple_transaction_set_label_over (new_trans_stmt,
	      gimple_transaction_label_over (old_trans_stmt));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      /* If the remapped decl became a constant or read-only
		 value, the self-assignment is meaningless; drop it.  */
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return NULL;
	    }
	}

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
	 in a block that we aren't copying during tree_function_versioning,
	 just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
	      if (gimple_bb (def_stmt)
		  && !bitmap_bit_p (id->blocks_to_copy,
				    gimple_bb (def_stmt)->index))
		return NULL;
	    }
	}

      /* Debug statements are copied here and also recorded in
	 id->debug_stmts so their operands can be remapped in a later
	 pass, once all decls have been mapped.  */
      if (gimple_debug_bind_p (stmt))
	{
	  gdebug *copy
	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
				       gimple_debug_bind_get_value (stmt),
				       stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  gdebug *copy = gimple_build_debug_source_bind
	                   (gimple_debug_source_bind_get_var (stmt),
			    gimple_debug_source_bind_get_value (stmt),
			    stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_nonbind_marker_p (stmt))
	{
	  /* If the inlined function has too many debug markers,
	     don't copy them.  */
	  if (id->src_cfun->debug_marker_count
	      > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
	    return stmts;

	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      gcc_checking_assert (!is_gimple_debug (stmt));

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  Tail-call and from-thunk
	 flags are not valid in the new (inlined) context; SIMD
	 internal calls mark the destination as having simduid loops.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
        {
	  if (gimple_call_tail_p (call_stmt))
	    gimple_call_set_tail (call_stmt, false);
	  if (gimple_call_from_thunk_p (call_stmt))
	    gimple_call_set_from_thunk (call_stmt, false);
	  if (gimple_call_internal_p (call_stmt))
	    switch (gimple_call_internal_fn (call_stmt))
	      {
	      case IFN_GOMP_SIMD_LANE:
	      case IFN_GOMP_SIMD_VF:
	      case IFN_GOMP_SIMD_LAST_LANE:
	      case IFN_GOMP_SIMD_ORDERED_START:
	      case IFN_GOMP_SIMD_ORDERED_END:
		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
	        break;
	      default:
		break;
	      }
	}

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    /* Arg 1 is the second region; arg 0 is handled by
		       the fallthrough below like the other builtins.  */
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (as_a <gcall *> (copy));
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      gresx *resx_stmt = as_a <gresx *> (copy);
	      int r = gimple_resx_region (resx_stmt);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (resx_stmt, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
	      int r = gimple_eh_dispatch_region (eh_dispatch);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (eh_dispatch, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = id->decl_map->get (gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  /* Debug statement copies are done: their operands are remapped later
     from id->debug_stmts, not here.  */
  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy)
      || gimple_debug_nonbind_marker_p (copy))
    {
      gimple_seq_add_stmt (&stmts, copy);
      return stmts;
    }

  /* Remap all the operands in COPY.  When SKIP_FIRST is set, operand 0
     (the already-substituted id->retvar LHS) is left untouched.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  gimple_seq_add_stmt (&stmts, copy);
  return stmts;
}
178038fd1498Szrj 
178138fd1498Szrj 
178238fd1498Szrj /* Copy basic block, scale profile accordingly.  Edges will be taken care of
178338fd1498Szrj    later  */
178438fd1498Szrj 
178538fd1498Szrj static basic_block
copy_bb(copy_body_data * id,basic_block bb,profile_count num,profile_count den)178638fd1498Szrj copy_bb (copy_body_data *id, basic_block bb,
178738fd1498Szrj          profile_count num, profile_count den)
178838fd1498Szrj {
178938fd1498Szrj   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
179038fd1498Szrj   basic_block copy_basic_block;
179138fd1498Szrj   tree decl;
179238fd1498Szrj   basic_block prev;
179338fd1498Szrj 
179438fd1498Szrj   profile_count::adjust_for_ipa_scaling (&num, &den);
179538fd1498Szrj 
179638fd1498Szrj   /* Search for previous copied basic block.  */
179738fd1498Szrj   prev = bb->prev_bb;
179838fd1498Szrj   while (!prev->aux)
179938fd1498Szrj     prev = prev->prev_bb;
180038fd1498Szrj 
180138fd1498Szrj   /* create_basic_block() will append every new block to
180238fd1498Szrj      basic_block_info automatically.  */
180338fd1498Szrj   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
180438fd1498Szrj   copy_basic_block->count = bb->count.apply_scale (num, den);
180538fd1498Szrj 
180638fd1498Szrj   copy_gsi = gsi_start_bb (copy_basic_block);
180738fd1498Szrj 
180838fd1498Szrj   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
180938fd1498Szrj     {
181038fd1498Szrj       gimple_seq stmts;
181138fd1498Szrj       gimple *stmt = gsi_stmt (gsi);
181238fd1498Szrj       gimple *orig_stmt = stmt;
181338fd1498Szrj       gimple_stmt_iterator stmts_gsi;
181438fd1498Szrj       bool stmt_added = false;
181538fd1498Szrj 
181638fd1498Szrj       id->regimplify = false;
181738fd1498Szrj       stmts = remap_gimple_stmt (stmt, id);
181838fd1498Szrj 
181938fd1498Szrj       if (gimple_seq_empty_p (stmts))
182038fd1498Szrj 	continue;
182138fd1498Szrj 
182238fd1498Szrj       seq_gsi = copy_gsi;
182338fd1498Szrj 
182438fd1498Szrj       for (stmts_gsi = gsi_start (stmts);
182538fd1498Szrj 	   !gsi_end_p (stmts_gsi); )
182638fd1498Szrj 	{
182738fd1498Szrj 	  stmt = gsi_stmt (stmts_gsi);
182838fd1498Szrj 
182938fd1498Szrj 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
183038fd1498Szrj 	  gsi_next (&stmts_gsi);
183138fd1498Szrj 
183238fd1498Szrj 	  if (gimple_nop_p (stmt))
183338fd1498Szrj 	      continue;
183438fd1498Szrj 
183538fd1498Szrj 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
183638fd1498Szrj 					    orig_stmt);
183738fd1498Szrj 
183838fd1498Szrj 	  /* With return slot optimization we can end up with
183938fd1498Szrj 	     non-gimple (foo *)&this->m, fix that here.  */
184038fd1498Szrj 	  if (is_gimple_assign (stmt)
184138fd1498Szrj 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
184238fd1498Szrj 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
184338fd1498Szrj 	    {
184438fd1498Szrj 	      tree new_rhs;
184538fd1498Szrj 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
184638fd1498Szrj 						  gimple_assign_rhs1 (stmt),
184738fd1498Szrj 						  true, NULL, false,
184838fd1498Szrj 						  GSI_CONTINUE_LINKING);
184938fd1498Szrj 	      gimple_assign_set_rhs1 (stmt, new_rhs);
185038fd1498Szrj 	      id->regimplify = false;
185138fd1498Szrj 	    }
185238fd1498Szrj 
185338fd1498Szrj 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
185438fd1498Szrj 
185538fd1498Szrj 	  if (id->regimplify)
185638fd1498Szrj 	    gimple_regimplify_operands (stmt, &seq_gsi);
185738fd1498Szrj 
185838fd1498Szrj 	  stmt_added = true;
185938fd1498Szrj 	}
186038fd1498Szrj 
186138fd1498Szrj       if (!stmt_added)
186238fd1498Szrj 	continue;
186338fd1498Szrj 
186438fd1498Szrj       /* If copy_basic_block has been empty at the start of this iteration,
186538fd1498Szrj 	 call gsi_start_bb again to get at the newly added statements.  */
186638fd1498Szrj       if (gsi_end_p (copy_gsi))
186738fd1498Szrj 	copy_gsi = gsi_start_bb (copy_basic_block);
186838fd1498Szrj       else
186938fd1498Szrj 	gsi_next (&copy_gsi);
187038fd1498Szrj 
187138fd1498Szrj       /* Process the new statement.  The call to gimple_regimplify_operands
187238fd1498Szrj 	 possibly turned the statement into multiple statements, we
187338fd1498Szrj 	 need to process all of them.  */
187438fd1498Szrj       do
187538fd1498Szrj 	{
187638fd1498Szrj 	  tree fn;
187738fd1498Szrj 	  gcall *call_stmt;
187838fd1498Szrj 
187938fd1498Szrj 	  stmt = gsi_stmt (copy_gsi);
188038fd1498Szrj 	  call_stmt = dyn_cast <gcall *> (stmt);
188138fd1498Szrj 	  if (call_stmt
188238fd1498Szrj 	      && gimple_call_va_arg_pack_p (call_stmt)
188338fd1498Szrj 	      && id->call_stmt
188438fd1498Szrj 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
188538fd1498Szrj 	    {
188638fd1498Szrj 	      /* __builtin_va_arg_pack () should be replaced by
188738fd1498Szrj 		 all arguments corresponding to ... in the caller.  */
188838fd1498Szrj 	      tree p;
188938fd1498Szrj 	      gcall *new_call;
189038fd1498Szrj 	      vec<tree> argarray;
189138fd1498Szrj 	      size_t nargs = gimple_call_num_args (id->call_stmt);
189238fd1498Szrj 	      size_t n, i, nargs_to_copy;
189338fd1498Szrj 	      bool remove_bounds = false;
189438fd1498Szrj 
189538fd1498Szrj 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
189638fd1498Szrj 		nargs--;
189738fd1498Szrj 
189838fd1498Szrj 	      /* Bounds should be removed from arg pack in case
189938fd1498Szrj 		 we handle not instrumented call in instrumented
190038fd1498Szrj 		 function.  */
190138fd1498Szrj 	      nargs_to_copy = nargs;
190238fd1498Szrj 	      if (gimple_call_with_bounds_p (id->call_stmt)
190338fd1498Szrj 		  && !gimple_call_with_bounds_p (stmt))
190438fd1498Szrj 		{
190538fd1498Szrj 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
190638fd1498Szrj 		       i < gimple_call_num_args (id->call_stmt);
190738fd1498Szrj 		       i++)
190838fd1498Szrj 		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
190938fd1498Szrj 		      nargs_to_copy--;
191038fd1498Szrj 		  remove_bounds = true;
191138fd1498Szrj 		}
191238fd1498Szrj 
191338fd1498Szrj 	      /* Create the new array of arguments.  */
191438fd1498Szrj 	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
191538fd1498Szrj 	      argarray.create (n);
191638fd1498Szrj 	      argarray.safe_grow_cleared (n);
191738fd1498Szrj 
191838fd1498Szrj 	      /* Copy all the arguments before '...'  */
191938fd1498Szrj 	      memcpy (argarray.address (),
192038fd1498Szrj 		      gimple_call_arg_ptr (call_stmt, 0),
192138fd1498Szrj 		      gimple_call_num_args (call_stmt) * sizeof (tree));
192238fd1498Szrj 
192338fd1498Szrj 	      if (remove_bounds)
192438fd1498Szrj 		{
192538fd1498Szrj 		  /* Append the rest of arguments removing bounds.  */
192638fd1498Szrj 		  unsigned cur = gimple_call_num_args (call_stmt);
192738fd1498Szrj 		  i = gimple_call_num_args (id->call_stmt) - nargs;
192838fd1498Szrj 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
192938fd1498Szrj 		       i < gimple_call_num_args (id->call_stmt);
193038fd1498Szrj 		       i++)
193138fd1498Szrj 		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
193238fd1498Szrj 		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
193338fd1498Szrj 		  gcc_assert (cur == n);
193438fd1498Szrj 		}
193538fd1498Szrj 	      else
193638fd1498Szrj 		{
193738fd1498Szrj 		  /* Append the arguments passed in '...'  */
193838fd1498Szrj 		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
193938fd1498Szrj 			  gimple_call_arg_ptr (id->call_stmt, 0)
194038fd1498Szrj 			  + (gimple_call_num_args (id->call_stmt) - nargs),
194138fd1498Szrj 			  nargs * sizeof (tree));
194238fd1498Szrj 		}
194338fd1498Szrj 
194438fd1498Szrj 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
194538fd1498Szrj 						argarray);
194638fd1498Szrj 
194738fd1498Szrj 	      argarray.release ();
194838fd1498Szrj 
194938fd1498Szrj 	      /* Copy all GIMPLE_CALL flags, location and block, except
195038fd1498Szrj 		 GF_CALL_VA_ARG_PACK.  */
195138fd1498Szrj 	      gimple_call_copy_flags (new_call, call_stmt);
195238fd1498Szrj 	      gimple_call_set_va_arg_pack (new_call, false);
195338fd1498Szrj 	      gimple_set_location (new_call, gimple_location (stmt));
195438fd1498Szrj 	      gimple_set_block (new_call, gimple_block (stmt));
195538fd1498Szrj 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
195638fd1498Szrj 
195738fd1498Szrj 	      gsi_replace (&copy_gsi, new_call, false);
195838fd1498Szrj 	      stmt = new_call;
195938fd1498Szrj 	    }
196038fd1498Szrj 	  else if (call_stmt
196138fd1498Szrj 		   && id->call_stmt
196238fd1498Szrj 		   && (decl = gimple_call_fndecl (stmt))
196338fd1498Szrj 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
196458e805e6Szrj 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
196538fd1498Szrj 	    {
196638fd1498Szrj 	      /* __builtin_va_arg_pack_len () should be replaced by
196738fd1498Szrj 		 the number of anonymous arguments.  */
196838fd1498Szrj 	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
196938fd1498Szrj 	      tree count, p;
197038fd1498Szrj 	      gimple *new_stmt;
197138fd1498Szrj 
197238fd1498Szrj 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
197338fd1498Szrj 		nargs--;
197438fd1498Szrj 
197538fd1498Szrj 	      /* For instrumented calls we should ignore bounds.  */
197638fd1498Szrj 	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
197738fd1498Szrj 		   i < gimple_call_num_args (id->call_stmt);
197838fd1498Szrj 		   i++)
197938fd1498Szrj 		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
198038fd1498Szrj 		  nargs--;
198138fd1498Szrj 
198258e805e6Szrj 	      if (!gimple_call_lhs (stmt))
198358e805e6Szrj 		{
198458e805e6Szrj 		  /* Drop unused calls.  */
198558e805e6Szrj 		  gsi_remove (&copy_gsi, false);
198658e805e6Szrj 		  continue;
198758e805e6Szrj 		}
198858e805e6Szrj 	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
198958e805e6Szrj 		{
199038fd1498Szrj 		  count = build_int_cst (integer_type_node, nargs);
199138fd1498Szrj 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
199238fd1498Szrj 		  gsi_replace (&copy_gsi, new_stmt, false);
199338fd1498Szrj 		  stmt = new_stmt;
199438fd1498Szrj 		}
199558e805e6Szrj 	      else if (nargs != 0)
199658e805e6Szrj 		{
199758e805e6Szrj 		  tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
199858e805e6Szrj 		  count = build_int_cst (integer_type_node, nargs);
199958e805e6Szrj 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
200058e805e6Szrj 						  PLUS_EXPR, newlhs, count);
200158e805e6Szrj 		  gimple_call_set_lhs (stmt, newlhs);
200258e805e6Szrj 		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
200358e805e6Szrj 		}
200458e805e6Szrj 	    }
200538fd1498Szrj 	  else if (call_stmt
200638fd1498Szrj 		   && id->call_stmt
200738fd1498Szrj 		   && gimple_call_internal_p (stmt)
200838fd1498Szrj 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
200938fd1498Szrj 	    {
201038fd1498Szrj 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
201138fd1498Szrj 	      gsi_remove (&copy_gsi, false);
201238fd1498Szrj 	      continue;
201338fd1498Szrj 	    }
201438fd1498Szrj 
201538fd1498Szrj 	  /* Statements produced by inlining can be unfolded, especially
201638fd1498Szrj 	     when we constant propagated some operands.  We can't fold
201738fd1498Szrj 	     them right now for two reasons:
201838fd1498Szrj 	     1) folding require SSA_NAME_DEF_STMTs to be correct
201938fd1498Szrj 	     2) we can't change function calls to builtins.
202038fd1498Szrj 	     So we just mark statement for later folding.  We mark
202138fd1498Szrj 	     all new statements, instead just statements that has changed
202238fd1498Szrj 	     by some nontrivial substitution so even statements made
202338fd1498Szrj 	     foldable indirectly are updated.  If this turns out to be
202438fd1498Szrj 	     expensive, copy_body can be told to watch for nontrivial
202538fd1498Szrj 	     changes.  */
202638fd1498Szrj 	  if (id->statements_to_fold)
202738fd1498Szrj 	    id->statements_to_fold->add (stmt);
202838fd1498Szrj 
202938fd1498Szrj 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
203038fd1498Szrj 	     callgraph edges and update or duplicate them.  */
203138fd1498Szrj 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
203238fd1498Szrj 	    {
203338fd1498Szrj 	      struct cgraph_edge *edge;
203438fd1498Szrj 
203538fd1498Szrj 	      switch (id->transform_call_graph_edges)
203638fd1498Szrj 		{
203738fd1498Szrj 		case CB_CGE_DUPLICATE:
203838fd1498Szrj 		  edge = id->src_node->get_edge (orig_stmt);
203938fd1498Szrj 		  if (edge)
204038fd1498Szrj 		    {
204138fd1498Szrj 		      struct cgraph_edge *old_edge = edge;
204238fd1498Szrj 		      profile_count old_cnt = edge->count;
204338fd1498Szrj 		      edge = edge->clone (id->dst_node, call_stmt,
204438fd1498Szrj 					  gimple_uid (stmt),
204538fd1498Szrj 					  num, den,
204638fd1498Szrj 					  true);
204738fd1498Szrj 
204838fd1498Szrj 		      /* Speculative calls consist of two edges - direct and
204938fd1498Szrj 			 indirect.  Duplicate the whole thing and distribute
205038fd1498Szrj 			 frequencies accordingly.  */
205138fd1498Szrj 		      if (edge->speculative)
205238fd1498Szrj 			{
205338fd1498Szrj 			  struct cgraph_edge *direct, *indirect;
205438fd1498Szrj 			  struct ipa_ref *ref;
205538fd1498Szrj 
205638fd1498Szrj 			  gcc_assert (!edge->indirect_unknown_callee);
205738fd1498Szrj 			  old_edge->speculative_call_info (direct, indirect, ref);
205838fd1498Szrj 
205938fd1498Szrj 			  profile_count indir_cnt = indirect->count;
206038fd1498Szrj 			  indirect = indirect->clone (id->dst_node, call_stmt,
206138fd1498Szrj 						      gimple_uid (stmt),
206238fd1498Szrj 						      num, den,
206338fd1498Szrj 						      true);
206438fd1498Szrj 
206538fd1498Szrj 			  profile_probability prob
206638fd1498Szrj 			     = indir_cnt.probability_in (old_cnt + indir_cnt);
206738fd1498Szrj 			  indirect->count
206838fd1498Szrj 			     = copy_basic_block->count.apply_probability (prob);
206938fd1498Szrj 			  edge->count = copy_basic_block->count - indirect->count;
207038fd1498Szrj 			  id->dst_node->clone_reference (ref, stmt);
207138fd1498Szrj 			}
207238fd1498Szrj 		      else
207338fd1498Szrj 			edge->count = copy_basic_block->count;
207438fd1498Szrj 		    }
207538fd1498Szrj 		  break;
207638fd1498Szrj 
207738fd1498Szrj 		case CB_CGE_MOVE_CLONES:
207838fd1498Szrj 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
207938fd1498Szrj 								call_stmt);
208038fd1498Szrj 		  edge = id->dst_node->get_edge (stmt);
208138fd1498Szrj 		  break;
208238fd1498Szrj 
208338fd1498Szrj 		case CB_CGE_MOVE:
208438fd1498Szrj 		  edge = id->dst_node->get_edge (orig_stmt);
208538fd1498Szrj 		  if (edge)
208638fd1498Szrj 		    edge->set_call_stmt (call_stmt);
208738fd1498Szrj 		  break;
208838fd1498Szrj 
208938fd1498Szrj 		default:
209038fd1498Szrj 		  gcc_unreachable ();
209138fd1498Szrj 		}
209238fd1498Szrj 
209338fd1498Szrj 	      /* Constant propagation on argument done during inlining
209438fd1498Szrj 		 may create new direct call.  Produce an edge for it.  */
209538fd1498Szrj 	      if ((!edge
209638fd1498Szrj 		   || (edge->indirect_inlining_edge
209738fd1498Szrj 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
209838fd1498Szrj 		  && id->dst_node->definition
209938fd1498Szrj 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
210038fd1498Szrj 		{
210138fd1498Szrj 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
210238fd1498Szrj 
210338fd1498Szrj 		  /* We have missing edge in the callgraph.  This can happen
210438fd1498Szrj 		     when previous inlining turned an indirect call into a
210538fd1498Szrj 		     direct call by constant propagating arguments or we are
210638fd1498Szrj 		     producing dead clone (for further cloning).  In all
210738fd1498Szrj 		     other cases we hit a bug (incorrect node sharing is the
210838fd1498Szrj 		     most common reason for missing edges).  */
210938fd1498Szrj 		  gcc_assert (!dest->definition
211038fd1498Szrj 			      || dest->address_taken
211138fd1498Szrj 		  	      || !id->src_node->definition
211238fd1498Szrj 			      || !id->dst_node->definition);
211338fd1498Szrj 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
211438fd1498Szrj 		    id->dst_node->create_edge_including_clones
211538fd1498Szrj 		      (dest, orig_stmt, call_stmt, bb->count,
211638fd1498Szrj 		       CIF_ORIGINALLY_INDIRECT_CALL);
211738fd1498Szrj 		  else
211838fd1498Szrj 		    id->dst_node->create_edge (dest, call_stmt,
211938fd1498Szrj 					bb->count)->inline_failed
212038fd1498Szrj 		      = CIF_ORIGINALLY_INDIRECT_CALL;
212138fd1498Szrj 		  if (dump_file)
212238fd1498Szrj 		    {
212338fd1498Szrj 		      fprintf (dump_file, "Created new direct edge to %s\n",
212438fd1498Szrj 			       dest->name ());
212538fd1498Szrj 		    }
212638fd1498Szrj 		}
212738fd1498Szrj 
212838fd1498Szrj 	      notice_special_calls (as_a <gcall *> (stmt));
212938fd1498Szrj 	    }
213038fd1498Szrj 
213138fd1498Szrj 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
213238fd1498Szrj 				      id->eh_map, id->eh_lp_nr);
213338fd1498Szrj 
213438fd1498Szrj 	  gsi_next (&copy_gsi);
213538fd1498Szrj 	}
213638fd1498Szrj       while (!gsi_end_p (copy_gsi));
213738fd1498Szrj 
213838fd1498Szrj       copy_gsi = gsi_last_bb (copy_basic_block);
213938fd1498Szrj     }
214038fd1498Szrj 
214138fd1498Szrj   return copy_basic_block;
214238fd1498Szrj }
214338fd1498Szrj 
214438fd1498Szrj /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
214538fd1498Szrj    form is quite easy, since dominator relationship for old basic blocks does
214638fd1498Szrj    not change.
214738fd1498Szrj 
214838fd1498Szrj    There is however exception where inlining might change dominator relation
214938fd1498Szrj    across EH edges from basic block within inlined functions destinating
215038fd1498Szrj    to landing pads in function we inline into.
215138fd1498Szrj 
215238fd1498Szrj    The function fills in PHI_RESULTs of such PHI nodes if they refer
215338fd1498Szrj    to gimple regs.  Otherwise, the function mark PHI_RESULT of such
215438fd1498Szrj    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
215538fd1498Szrj    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
215638fd1498Szrj    set, and this means that there will be no overlapping live ranges
215738fd1498Szrj    for the underlying symbol.
215838fd1498Szrj 
215938fd1498Szrj    This might change in future if we allow redirecting of EH edges and
216038fd1498Szrj    we might want to change way build CFG pre-inlining to include
216138fd1498Szrj    all the possible edges then.  */
216238fd1498Szrj static void
update_ssa_across_abnormal_edges(basic_block bb,basic_block ret_bb,bool can_throw,bool nonlocal_goto)216338fd1498Szrj update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
216438fd1498Szrj 				  bool can_throw, bool nonlocal_goto)
216538fd1498Szrj {
216638fd1498Szrj   edge e;
216738fd1498Szrj   edge_iterator ei;
216838fd1498Szrj 
216938fd1498Szrj   FOR_EACH_EDGE (e, ei, bb->succs)
217038fd1498Szrj     if (!e->dest->aux
217138fd1498Szrj 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
217238fd1498Szrj       {
217338fd1498Szrj 	gphi *phi;
217438fd1498Szrj 	gphi_iterator si;
217538fd1498Szrj 
217638fd1498Szrj 	if (!nonlocal_goto)
217738fd1498Szrj 	  gcc_assert (e->flags & EDGE_EH);
217838fd1498Szrj 
217938fd1498Szrj 	if (!can_throw)
218038fd1498Szrj 	  gcc_assert (!(e->flags & EDGE_EH));
218138fd1498Szrj 
218238fd1498Szrj 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
218338fd1498Szrj 	  {
218438fd1498Szrj 	    edge re;
218538fd1498Szrj 
218638fd1498Szrj 	    phi = si.phi ();
218738fd1498Szrj 
218838fd1498Szrj 	    /* For abnormal goto/call edges the receiver can be the
218938fd1498Szrj 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
219038fd1498Szrj 
219138fd1498Szrj 	    gcc_assert ((e->flags & EDGE_EH)
219238fd1498Szrj 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
219338fd1498Szrj 
219438fd1498Szrj 	    re = find_edge (ret_bb, e->dest);
219538fd1498Szrj 	    gcc_checking_assert (re);
219638fd1498Szrj 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
219738fd1498Szrj 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
219838fd1498Szrj 
219938fd1498Szrj 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
220038fd1498Szrj 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
220138fd1498Szrj 	  }
220238fd1498Szrj       }
220338fd1498Szrj }
220438fd1498Szrj 
220538fd1498Szrj 
220638fd1498Szrj /* Copy edges from BB into its copy constructed earlier, scale profile
220738fd1498Szrj    accordingly.  Edges will be taken care of later.  Assume aux
220838fd1498Szrj    pointers to point to the copies of each BB.  Return true if any
220938fd1498Szrj    debug stmts are left after a statement that must end the basic block.  */
221038fd1498Szrj 
221138fd1498Szrj static bool
copy_edges_for_bb(basic_block bb,profile_count num,profile_count den,basic_block ret_bb,basic_block abnormal_goto_dest)221238fd1498Szrj copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
221338fd1498Szrj 		   basic_block ret_bb, basic_block abnormal_goto_dest)
221438fd1498Szrj {
221538fd1498Szrj   basic_block new_bb = (basic_block) bb->aux;
221638fd1498Szrj   edge_iterator ei;
221738fd1498Szrj   edge old_edge;
221838fd1498Szrj   gimple_stmt_iterator si;
221938fd1498Szrj   int flags;
222038fd1498Szrj   bool need_debug_cleanup = false;
222138fd1498Szrj 
222238fd1498Szrj   /* Use the indices from the original blocks to create edges for the
222338fd1498Szrj      new ones.  */
222438fd1498Szrj   FOR_EACH_EDGE (old_edge, ei, bb->succs)
222538fd1498Szrj     if (!(old_edge->flags & EDGE_EH))
222638fd1498Szrj       {
222738fd1498Szrj 	edge new_edge;
222838fd1498Szrj 
222938fd1498Szrj 	flags = old_edge->flags;
223038fd1498Szrj 
223138fd1498Szrj 	/* Return edges do get a FALLTHRU flag when the get inlined.  */
223238fd1498Szrj 	if (old_edge->dest->index == EXIT_BLOCK
223338fd1498Szrj 	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
223438fd1498Szrj 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
223538fd1498Szrj 	  flags |= EDGE_FALLTHRU;
223638fd1498Szrj 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
223738fd1498Szrj 	new_edge->probability = old_edge->probability;
223838fd1498Szrj       }
223938fd1498Szrj 
224038fd1498Szrj   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
224138fd1498Szrj     return false;
224238fd1498Szrj 
224338fd1498Szrj   /* When doing function splitting, we must decreate count of the return block
224438fd1498Szrj      which was previously reachable by block we did not copy.  */
224538fd1498Szrj   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
224638fd1498Szrj     FOR_EACH_EDGE (old_edge, ei, bb->preds)
224738fd1498Szrj       if (old_edge->src->index != ENTRY_BLOCK
224838fd1498Szrj 	  && !old_edge->src->aux)
224938fd1498Szrj 	new_bb->count -= old_edge->count ().apply_scale (num, den);
225038fd1498Szrj 
225138fd1498Szrj   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
225238fd1498Szrj     {
225338fd1498Szrj       gimple *copy_stmt;
225438fd1498Szrj       bool can_throw, nonlocal_goto;
225538fd1498Szrj 
225638fd1498Szrj       copy_stmt = gsi_stmt (si);
225738fd1498Szrj       if (!is_gimple_debug (copy_stmt))
225838fd1498Szrj 	update_stmt (copy_stmt);
225938fd1498Szrj 
226038fd1498Szrj       /* Do this before the possible split_block.  */
226138fd1498Szrj       gsi_next (&si);
226238fd1498Szrj 
226338fd1498Szrj       /* If this tree could throw an exception, there are two
226438fd1498Szrj          cases where we need to add abnormal edge(s): the
226538fd1498Szrj          tree wasn't in a region and there is a "current
226638fd1498Szrj          region" in the caller; or the original tree had
226738fd1498Szrj          EH edges.  In both cases split the block after the tree,
226838fd1498Szrj          and add abnormal edge(s) as needed; we need both
226938fd1498Szrj          those from the callee and the caller.
227038fd1498Szrj          We check whether the copy can throw, because the const
227138fd1498Szrj          propagation can change an INDIRECT_REF which throws
227238fd1498Szrj          into a COMPONENT_REF which doesn't.  If the copy
227338fd1498Szrj          can throw, the original could also throw.  */
227438fd1498Szrj       can_throw = stmt_can_throw_internal (copy_stmt);
227538fd1498Szrj       nonlocal_goto
227638fd1498Szrj 	= (stmt_can_make_abnormal_goto (copy_stmt)
227738fd1498Szrj 	   && !computed_goto_p (copy_stmt));
227838fd1498Szrj 
227938fd1498Szrj       if (can_throw || nonlocal_goto)
228038fd1498Szrj 	{
228138fd1498Szrj 	  if (!gsi_end_p (si))
228238fd1498Szrj 	    {
228338fd1498Szrj 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
228438fd1498Szrj 		gsi_next (&si);
228538fd1498Szrj 	      if (gsi_end_p (si))
228638fd1498Szrj 		need_debug_cleanup = true;
228738fd1498Szrj 	    }
228838fd1498Szrj 	  if (!gsi_end_p (si))
228938fd1498Szrj 	    /* Note that bb's predecessor edges aren't necessarily
229038fd1498Szrj 	       right at this point; split_block doesn't care.  */
229138fd1498Szrj 	    {
229238fd1498Szrj 	      edge e = split_block (new_bb, copy_stmt);
229338fd1498Szrj 
229438fd1498Szrj 	      new_bb = e->dest;
229538fd1498Szrj 	      new_bb->aux = e->src->aux;
229638fd1498Szrj 	      si = gsi_start_bb (new_bb);
229738fd1498Szrj 	    }
229838fd1498Szrj 	}
229938fd1498Szrj 
230038fd1498Szrj       bool update_probs = false;
230138fd1498Szrj 
230238fd1498Szrj       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
230338fd1498Szrj 	{
230438fd1498Szrj 	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
230538fd1498Szrj 	  update_probs = true;
230638fd1498Szrj 	}
230738fd1498Szrj       else if (can_throw)
230838fd1498Szrj 	{
230938fd1498Szrj 	  make_eh_edges (copy_stmt);
231038fd1498Szrj 	  update_probs = true;
231138fd1498Szrj 	}
231238fd1498Szrj 
231338fd1498Szrj       /* EH edges may not match old edges.  Copy as much as possible.  */
231438fd1498Szrj       if (update_probs)
231538fd1498Szrj 	{
231638fd1498Szrj           edge e;
231738fd1498Szrj           edge_iterator ei;
231838fd1498Szrj 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
231938fd1498Szrj 
232038fd1498Szrj           FOR_EACH_EDGE (old_edge, ei, bb->succs)
232138fd1498Szrj             if ((old_edge->flags & EDGE_EH)
232238fd1498Szrj 		&& (e = find_edge (copy_stmt_bb,
232338fd1498Szrj 				   (basic_block) old_edge->dest->aux))
232438fd1498Szrj 		&& (e->flags & EDGE_EH))
232538fd1498Szrj 	      e->probability = old_edge->probability;
232638fd1498Szrj 
232738fd1498Szrj           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
232838fd1498Szrj 	    if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
232938fd1498Szrj 	      e->probability = profile_probability::never ();
233038fd1498Szrj         }
233138fd1498Szrj 
233238fd1498Szrj 
233338fd1498Szrj       /* If the call we inline cannot make abnormal goto do not add
233438fd1498Szrj          additional abnormal edges but only retain those already present
233538fd1498Szrj 	 in the original function body.  */
233638fd1498Szrj       if (abnormal_goto_dest == NULL)
233738fd1498Szrj 	nonlocal_goto = false;
233838fd1498Szrj       if (nonlocal_goto)
233938fd1498Szrj 	{
234038fd1498Szrj 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
234138fd1498Szrj 
234238fd1498Szrj 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
234338fd1498Szrj 	    nonlocal_goto = false;
234438fd1498Szrj 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
234538fd1498Szrj 	     in OpenMP regions which aren't allowed to be left abnormally.
234638fd1498Szrj 	     So, no need to add abnormal edge in that case.  */
234738fd1498Szrj 	  else if (is_gimple_call (copy_stmt)
234838fd1498Szrj 		   && gimple_call_internal_p (copy_stmt)
234938fd1498Szrj 		   && (gimple_call_internal_fn (copy_stmt)
235038fd1498Szrj 		       == IFN_ABNORMAL_DISPATCHER)
235138fd1498Szrj 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
235238fd1498Szrj 	    nonlocal_goto = false;
235338fd1498Szrj 	  else
235438fd1498Szrj 	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
235538fd1498Szrj 				   EDGE_ABNORMAL);
235638fd1498Szrj 	}
235738fd1498Szrj 
235838fd1498Szrj       if ((can_throw || nonlocal_goto)
235938fd1498Szrj 	  && gimple_in_ssa_p (cfun))
236038fd1498Szrj 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
236138fd1498Szrj 					  can_throw, nonlocal_goto);
236238fd1498Szrj     }
236338fd1498Szrj   return need_debug_cleanup;
236438fd1498Szrj }
236538fd1498Szrj 
236638fd1498Szrj /* Copy the PHIs.  All blocks and edges are copied, some blocks
236738fd1498Szrj    was possibly split and new outgoing EH edges inserted.
236838fd1498Szrj    BB points to the block of original function and AUX pointers links
236938fd1498Szrj    the original and newly copied blocks.  */
237038fd1498Szrj 
237138fd1498Szrj static void
copy_phis_for_bb(basic_block bb,copy_body_data * id)237238fd1498Szrj copy_phis_for_bb (basic_block bb, copy_body_data *id)
237338fd1498Szrj {
237438fd1498Szrj   basic_block const new_bb = (basic_block) bb->aux;
237538fd1498Szrj   edge_iterator ei;
237638fd1498Szrj   gphi *phi;
237738fd1498Szrj   gphi_iterator si;
237838fd1498Szrj   edge new_edge;
237938fd1498Szrj   bool inserted = false;
238038fd1498Szrj 
238138fd1498Szrj   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
238238fd1498Szrj     {
238338fd1498Szrj       tree res, new_res;
238438fd1498Szrj       gphi *new_phi;
238538fd1498Szrj 
238638fd1498Szrj       phi = si.phi ();
238738fd1498Szrj       res = PHI_RESULT (phi);
238838fd1498Szrj       new_res = res;
238938fd1498Szrj       if (!virtual_operand_p (res))
239038fd1498Szrj 	{
239138fd1498Szrj 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
239238fd1498Szrj 	  if (EDGE_COUNT (new_bb->preds) == 0)
239338fd1498Szrj 	    {
239438fd1498Szrj 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
239538fd1498Szrj 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
239638fd1498Szrj 	    }
239738fd1498Szrj 	  else
239838fd1498Szrj 	    {
239938fd1498Szrj 	      new_phi = create_phi_node (new_res, new_bb);
240038fd1498Szrj 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
240138fd1498Szrj 		{
240238fd1498Szrj 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
240338fd1498Szrj 					     bb);
240438fd1498Szrj 		  tree arg;
240538fd1498Szrj 		  tree new_arg;
240638fd1498Szrj 		  edge_iterator ei2;
240738fd1498Szrj 		  location_t locus;
240838fd1498Szrj 
240938fd1498Szrj 		  /* When doing partial cloning, we allow PHIs on the entry
241038fd1498Szrj 		     block as long as all the arguments are the same.
241138fd1498Szrj 		     Find any input edge to see argument to copy.  */
241238fd1498Szrj 		  if (!old_edge)
241338fd1498Szrj 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
241438fd1498Szrj 		      if (!old_edge->src->aux)
241538fd1498Szrj 			break;
241638fd1498Szrj 
241738fd1498Szrj 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
241838fd1498Szrj 		  new_arg = arg;
241938fd1498Szrj 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
242038fd1498Szrj 		  gcc_assert (new_arg);
242138fd1498Szrj 		  /* With return slot optimization we can end up with
242238fd1498Szrj 		     non-gimple (foo *)&this->m, fix that here.  */
242338fd1498Szrj 		  if (TREE_CODE (new_arg) != SSA_NAME
242438fd1498Szrj 		      && TREE_CODE (new_arg) != FUNCTION_DECL
242538fd1498Szrj 		      && !is_gimple_val (new_arg))
242638fd1498Szrj 		    {
242738fd1498Szrj 		      gimple_seq stmts = NULL;
242838fd1498Szrj 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
242938fd1498Szrj 						      NULL);
243038fd1498Szrj 		      gsi_insert_seq_on_edge (new_edge, stmts);
243138fd1498Szrj 		      inserted = true;
243238fd1498Szrj 		    }
243338fd1498Szrj 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
243438fd1498Szrj 		  if (LOCATION_BLOCK (locus))
243538fd1498Szrj 		    {
243638fd1498Szrj 		      tree *n;
243738fd1498Szrj 		      n = id->decl_map->get (LOCATION_BLOCK (locus));
243838fd1498Szrj 		      gcc_assert (n);
243938fd1498Szrj 		      locus = set_block (locus, *n);
244038fd1498Szrj 		    }
244138fd1498Szrj 		  else
244238fd1498Szrj 		    locus = LOCATION_LOCUS (locus);
244338fd1498Szrj 
244438fd1498Szrj 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
244538fd1498Szrj 		}
244638fd1498Szrj 	    }
244738fd1498Szrj 	}
244838fd1498Szrj     }
244938fd1498Szrj 
245038fd1498Szrj   /* Commit the delayed edge insertions.  */
245138fd1498Szrj   if (inserted)
245238fd1498Szrj     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
245338fd1498Szrj       gsi_commit_one_edge_insert (new_edge, NULL);
245438fd1498Szrj }
245538fd1498Szrj 
245638fd1498Szrj 
245738fd1498Szrj /* Wrapper for remap_decl so it can be used as a callback.  */
245838fd1498Szrj 
245938fd1498Szrj static tree
remap_decl_1(tree decl,void * data)246038fd1498Szrj remap_decl_1 (tree decl, void *data)
246138fd1498Szrj {
246238fd1498Szrj   return remap_decl (decl, (copy_body_data *) data);
246338fd1498Szrj }
246438fd1498Szrj 
246538fd1498Szrj /* Build struct function and associated datastructures for the new clone
246638fd1498Szrj    NEW_FNDECL to be build.  CALLEE_FNDECL is the original.  Function changes
246738fd1498Szrj    the cfun to the function of new_fndecl (and current_function_decl too).  */
246838fd1498Szrj 
246938fd1498Szrj static void
initialize_cfun(tree new_fndecl,tree callee_fndecl,profile_count count)247038fd1498Szrj initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
247138fd1498Szrj {
247238fd1498Szrj   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
247338fd1498Szrj 
247438fd1498Szrj   if (!DECL_ARGUMENTS (new_fndecl))
247538fd1498Szrj     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
247638fd1498Szrj   if (!DECL_RESULT (new_fndecl))
247738fd1498Szrj     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
247838fd1498Szrj 
247938fd1498Szrj   /* Register specific tree functions.  */
248038fd1498Szrj   gimple_register_cfg_hooks ();
248138fd1498Szrj 
248238fd1498Szrj   /* Get clean struct function.  */
248338fd1498Szrj   push_struct_function (new_fndecl);
248438fd1498Szrj 
248538fd1498Szrj   /* We will rebuild these, so just sanity check that they are empty.  */
248638fd1498Szrj   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
248738fd1498Szrj   gcc_assert (cfun->local_decls == NULL);
248838fd1498Szrj   gcc_assert (cfun->cfg == NULL);
248938fd1498Szrj   gcc_assert (cfun->decl == new_fndecl);
249038fd1498Szrj 
249138fd1498Szrj   /* Copy items we preserve during cloning.  */
249238fd1498Szrj   cfun->static_chain_decl = src_cfun->static_chain_decl;
249338fd1498Szrj   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
249438fd1498Szrj   cfun->function_end_locus = src_cfun->function_end_locus;
249538fd1498Szrj   cfun->curr_properties = src_cfun->curr_properties;
249638fd1498Szrj   cfun->last_verified = src_cfun->last_verified;
249738fd1498Szrj   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
249838fd1498Szrj   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
249938fd1498Szrj   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
250038fd1498Szrj   cfun->stdarg = src_cfun->stdarg;
250138fd1498Szrj   cfun->after_inlining = src_cfun->after_inlining;
250238fd1498Szrj   cfun->can_throw_non_call_exceptions
250338fd1498Szrj     = src_cfun->can_throw_non_call_exceptions;
250438fd1498Szrj   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
250538fd1498Szrj   cfun->returns_struct = src_cfun->returns_struct;
250638fd1498Szrj   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
250738fd1498Szrj 
250838fd1498Szrj   init_empty_tree_cfg ();
250938fd1498Szrj 
251038fd1498Szrj   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
251138fd1498Szrj 
251238fd1498Szrj   profile_count num = count;
251338fd1498Szrj   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
251438fd1498Szrj   profile_count::adjust_for_ipa_scaling (&num, &den);
251538fd1498Szrj 
251638fd1498Szrj   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
251738fd1498Szrj     ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
251838fd1498Szrj 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
251938fd1498Szrj   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
252038fd1498Szrj     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
252138fd1498Szrj 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
252238fd1498Szrj   if (src_cfun->eh)
252338fd1498Szrj     init_eh_for_function ();
252438fd1498Szrj 
252538fd1498Szrj   if (src_cfun->gimple_df)
252638fd1498Szrj     {
252738fd1498Szrj       init_tree_ssa (cfun);
252838fd1498Szrj       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
252938fd1498Szrj       if (cfun->gimple_df->in_ssa_p)
253038fd1498Szrj 	init_ssa_operands (cfun);
253138fd1498Szrj     }
253238fd1498Szrj }
253338fd1498Szrj 
253438fd1498Szrj /* Helper function for copy_cfg_body.  Move debug stmts from the end
253538fd1498Szrj    of NEW_BB to the beginning of successor basic blocks when needed.  If the
253638fd1498Szrj    successor has multiple predecessors, reset them, otherwise keep
253738fd1498Szrj    their value.  */
253838fd1498Szrj 
253938fd1498Szrj static void
maybe_move_debug_stmts_to_successors(copy_body_data * id,basic_block new_bb)254038fd1498Szrj maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
254138fd1498Szrj {
254238fd1498Szrj   edge e;
254338fd1498Szrj   edge_iterator ei;
254438fd1498Szrj   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
254538fd1498Szrj 
254638fd1498Szrj   if (gsi_end_p (si)
254738fd1498Szrj       || gsi_one_before_end_p (si)
254838fd1498Szrj       || !(stmt_can_throw_internal (gsi_stmt (si))
254938fd1498Szrj 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
255038fd1498Szrj     return;
255138fd1498Szrj 
255238fd1498Szrj   FOR_EACH_EDGE (e, ei, new_bb->succs)
255338fd1498Szrj     {
255438fd1498Szrj       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
255538fd1498Szrj       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
255638fd1498Szrj       while (is_gimple_debug (gsi_stmt (ssi)))
255738fd1498Szrj 	{
255838fd1498Szrj 	  gimple *stmt = gsi_stmt (ssi);
255938fd1498Szrj 	  gdebug *new_stmt;
256038fd1498Szrj 	  tree var;
256138fd1498Szrj 	  tree value;
256238fd1498Szrj 
256338fd1498Szrj 	  /* For the last edge move the debug stmts instead of copying
256438fd1498Szrj 	     them.  */
256538fd1498Szrj 	  if (ei_one_before_end_p (ei))
256638fd1498Szrj 	    {
256738fd1498Szrj 	      si = ssi;
256838fd1498Szrj 	      gsi_prev (&ssi);
256938fd1498Szrj 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
257038fd1498Szrj 		gimple_debug_bind_reset_value (stmt);
257138fd1498Szrj 	      gsi_remove (&si, false);
257238fd1498Szrj 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
257338fd1498Szrj 	      continue;
257438fd1498Szrj 	    }
257538fd1498Szrj 
257638fd1498Szrj 	  if (gimple_debug_bind_p (stmt))
257738fd1498Szrj 	    {
257838fd1498Szrj 	      var = gimple_debug_bind_get_var (stmt);
257938fd1498Szrj 	      if (single_pred_p (e->dest))
258038fd1498Szrj 		{
258138fd1498Szrj 		  value = gimple_debug_bind_get_value (stmt);
258238fd1498Szrj 		  value = unshare_expr (value);
258338fd1498Szrj 		}
258438fd1498Szrj 	      else
258538fd1498Szrj 		value = NULL_TREE;
258638fd1498Szrj 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
258738fd1498Szrj 	    }
258838fd1498Szrj 	  else if (gimple_debug_source_bind_p (stmt))
258938fd1498Szrj 	    {
259038fd1498Szrj 	      var = gimple_debug_source_bind_get_var (stmt);
259138fd1498Szrj 	      value = gimple_debug_source_bind_get_value (stmt);
259238fd1498Szrj 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
259338fd1498Szrj 	    }
259438fd1498Szrj 	  else if (gimple_debug_nonbind_marker_p (stmt))
259538fd1498Szrj 	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
259638fd1498Szrj 	  else
259738fd1498Szrj 	    gcc_unreachable ();
259838fd1498Szrj 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
259938fd1498Szrj 	  id->debug_stmts.safe_push (new_stmt);
260038fd1498Szrj 	  gsi_prev (&ssi);
260138fd1498Szrj 	}
260238fd1498Szrj     }
260338fd1498Szrj }
260438fd1498Szrj 
260538fd1498Szrj /* Make a copy of the sub-loops of SRC_PARENT and place them
260638fd1498Szrj    as siblings of DEST_PARENT.  */
260738fd1498Szrj 
260838fd1498Szrj static void
copy_loops(copy_body_data * id,struct loop * dest_parent,struct loop * src_parent)260938fd1498Szrj copy_loops (copy_body_data *id,
261038fd1498Szrj 	    struct loop *dest_parent, struct loop *src_parent)
261138fd1498Szrj {
261238fd1498Szrj   struct loop *src_loop = src_parent->inner;
261338fd1498Szrj   while (src_loop)
261438fd1498Szrj     {
261538fd1498Szrj       if (!id->blocks_to_copy
261638fd1498Szrj 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
261738fd1498Szrj 	{
261838fd1498Szrj 	  struct loop *dest_loop = alloc_loop ();
261938fd1498Szrj 
262038fd1498Szrj 	  /* Assign the new loop its header and latch and associate
262138fd1498Szrj 	     those with the new loop.  */
262238fd1498Szrj 	  dest_loop->header = (basic_block)src_loop->header->aux;
262338fd1498Szrj 	  dest_loop->header->loop_father = dest_loop;
262438fd1498Szrj 	  if (src_loop->latch != NULL)
262538fd1498Szrj 	    {
262638fd1498Szrj 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
262738fd1498Szrj 	      dest_loop->latch->loop_father = dest_loop;
262838fd1498Szrj 	    }
262938fd1498Szrj 
263038fd1498Szrj 	  /* Copy loop meta-data.  */
263138fd1498Szrj 	  copy_loop_info (src_loop, dest_loop);
263238fd1498Szrj 
263338fd1498Szrj 	  /* Finally place it into the loop array and the loop tree.  */
263438fd1498Szrj 	  place_new_loop (cfun, dest_loop);
263538fd1498Szrj 	  flow_loop_tree_node_add (dest_parent, dest_loop);
263638fd1498Szrj 
263738fd1498Szrj 	  dest_loop->safelen = src_loop->safelen;
263838fd1498Szrj 	  if (src_loop->unroll)
263938fd1498Szrj 	    {
264038fd1498Szrj 	      dest_loop->unroll = src_loop->unroll;
264138fd1498Szrj 	      cfun->has_unroll = true;
264238fd1498Szrj 	    }
264338fd1498Szrj 	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
264438fd1498Szrj 	  if (src_loop->force_vectorize)
264538fd1498Szrj 	    {
264638fd1498Szrj 	      dest_loop->force_vectorize = true;
264738fd1498Szrj 	      cfun->has_force_vectorize_loops = true;
264838fd1498Szrj 	    }
264938fd1498Szrj 	  if (src_loop->simduid)
265038fd1498Szrj 	    {
265138fd1498Szrj 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
265238fd1498Szrj 	      cfun->has_simduid_loops = true;
265338fd1498Szrj 	    }
265438fd1498Szrj 
265538fd1498Szrj 	  /* Recurse.  */
265638fd1498Szrj 	  copy_loops (id, dest_loop, src_loop);
265738fd1498Szrj 	}
265838fd1498Szrj       src_loop = src_loop->next;
265938fd1498Szrj     }
266038fd1498Szrj }
266138fd1498Szrj 
266238fd1498Szrj /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
266338fd1498Szrj 
266438fd1498Szrj void
redirect_all_calls(copy_body_data * id,basic_block bb)266538fd1498Szrj redirect_all_calls (copy_body_data * id, basic_block bb)
266638fd1498Szrj {
266738fd1498Szrj   gimple_stmt_iterator si;
266838fd1498Szrj   gimple *last = last_stmt (bb);
266938fd1498Szrj   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
267038fd1498Szrj     {
267138fd1498Szrj       gimple *stmt = gsi_stmt (si);
267238fd1498Szrj       if (is_gimple_call (stmt))
267338fd1498Szrj 	{
267438fd1498Szrj 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
267538fd1498Szrj 	  if (edge)
267638fd1498Szrj 	    {
267738fd1498Szrj 	      edge->redirect_call_stmt_to_callee ();
267838fd1498Szrj 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
267938fd1498Szrj 		gimple_purge_dead_eh_edges (bb);
268038fd1498Szrj 	    }
268138fd1498Szrj 	}
268238fd1498Szrj     }
268338fd1498Szrj }
268438fd1498Szrj 
268538fd1498Szrj /* Make a copy of the body of FN so that it can be inserted inline in
268638fd1498Szrj    another function.  Walks FN via CFG, returns new fndecl.  */
268738fd1498Szrj 
268838fd1498Szrj static tree
copy_cfg_body(copy_body_data * id,basic_block entry_block_map,basic_block exit_block_map,basic_block new_entry)268938fd1498Szrj copy_cfg_body (copy_body_data * id,
269038fd1498Szrj 	       basic_block entry_block_map, basic_block exit_block_map,
269138fd1498Szrj 	       basic_block new_entry)
269238fd1498Szrj {
269338fd1498Szrj   tree callee_fndecl = id->src_fn;
269438fd1498Szrj   /* Original cfun for the callee, doesn't change.  */
269538fd1498Szrj   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
269638fd1498Szrj   struct function *cfun_to_copy;
269738fd1498Szrj   basic_block bb;
269838fd1498Szrj   tree new_fndecl = NULL;
269938fd1498Szrj   bool need_debug_cleanup = false;
270038fd1498Szrj   int last;
270138fd1498Szrj   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
270238fd1498Szrj   profile_count num = entry_block_map->count;
270338fd1498Szrj 
270438fd1498Szrj   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
270538fd1498Szrj 
270638fd1498Szrj   /* Register specific tree functions.  */
270738fd1498Szrj   gimple_register_cfg_hooks ();
270838fd1498Szrj 
270938fd1498Szrj   /* If we are inlining just region of the function, make sure to connect
271038fd1498Szrj      new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since new entry can be
271138fd1498Szrj      part of loop, we must compute frequency and probability of
271238fd1498Szrj      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
271338fd1498Szrj      probabilities of edges incoming from nonduplicated region.  */
271438fd1498Szrj   if (new_entry)
271538fd1498Szrj     {
271638fd1498Szrj       edge e;
271738fd1498Szrj       edge_iterator ei;
271838fd1498Szrj       den = profile_count::zero ();
271938fd1498Szrj 
272038fd1498Szrj       FOR_EACH_EDGE (e, ei, new_entry->preds)
272138fd1498Szrj 	if (!e->src->aux)
272238fd1498Szrj 	  den += e->count ();
272338fd1498Szrj       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
272438fd1498Szrj     }
272538fd1498Szrj 
272638fd1498Szrj   profile_count::adjust_for_ipa_scaling (&num, &den);
272738fd1498Szrj 
272838fd1498Szrj   /* Must have a CFG here at this point.  */
272938fd1498Szrj   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
273038fd1498Szrj 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
273138fd1498Szrj 
273238fd1498Szrj 
273338fd1498Szrj   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
273438fd1498Szrj   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
273538fd1498Szrj   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
273638fd1498Szrj   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
273738fd1498Szrj 
273838fd1498Szrj   /* Duplicate any exception-handling regions.  */
273938fd1498Szrj   if (cfun->eh)
274038fd1498Szrj     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
274138fd1498Szrj 				       remap_decl_1, id);
274238fd1498Szrj 
274338fd1498Szrj   /* Use aux pointers to map the original blocks to copy.  */
274438fd1498Szrj   FOR_EACH_BB_FN (bb, cfun_to_copy)
274538fd1498Szrj     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
274638fd1498Szrj       {
274738fd1498Szrj 	basic_block new_bb = copy_bb (id, bb, num, den);
274838fd1498Szrj 	bb->aux = new_bb;
274938fd1498Szrj 	new_bb->aux = bb;
275038fd1498Szrj 	new_bb->loop_father = entry_block_map->loop_father;
275138fd1498Szrj       }
275238fd1498Szrj 
275338fd1498Szrj   last = last_basic_block_for_fn (cfun);
275438fd1498Szrj 
275538fd1498Szrj   /* Now that we've duplicated the blocks, duplicate their edges.  */
275638fd1498Szrj   basic_block abnormal_goto_dest = NULL;
275738fd1498Szrj   if (id->call_stmt
275838fd1498Szrj       && stmt_can_make_abnormal_goto (id->call_stmt))
275938fd1498Szrj     {
276038fd1498Szrj       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
276138fd1498Szrj 
276238fd1498Szrj       bb = gimple_bb (id->call_stmt);
276338fd1498Szrj       gsi_next (&gsi);
276438fd1498Szrj       if (gsi_end_p (gsi))
276538fd1498Szrj 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
276638fd1498Szrj     }
276738fd1498Szrj   FOR_ALL_BB_FN (bb, cfun_to_copy)
276838fd1498Szrj     if (!id->blocks_to_copy
276938fd1498Szrj 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
277038fd1498Szrj       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
277138fd1498Szrj 					       abnormal_goto_dest);
277238fd1498Szrj 
277338fd1498Szrj   if (new_entry)
277438fd1498Szrj     {
277538fd1498Szrj       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
277638fd1498Szrj 			  EDGE_FALLTHRU);
277738fd1498Szrj       e->probability = profile_probability::always ();
277838fd1498Szrj     }
277938fd1498Szrj 
278038fd1498Szrj   /* Duplicate the loop tree, if available and wanted.  */
278138fd1498Szrj   if (loops_for_fn (src_cfun) != NULL
278238fd1498Szrj       && current_loops != NULL)
278338fd1498Szrj     {
278438fd1498Szrj       copy_loops (id, entry_block_map->loop_father,
278538fd1498Szrj 		  get_loop (src_cfun, 0));
278638fd1498Szrj       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
278738fd1498Szrj       loops_state_set (LOOPS_NEED_FIXUP);
278838fd1498Szrj     }
278938fd1498Szrj 
279038fd1498Szrj   /* If the loop tree in the source function needed fixup, mark the
279138fd1498Szrj      destination loop tree for fixup, too.  */
279238fd1498Szrj   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
279338fd1498Szrj     loops_state_set (LOOPS_NEED_FIXUP);
279438fd1498Szrj 
279538fd1498Szrj   if (gimple_in_ssa_p (cfun))
279638fd1498Szrj     FOR_ALL_BB_FN (bb, cfun_to_copy)
279738fd1498Szrj       if (!id->blocks_to_copy
279838fd1498Szrj 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
279938fd1498Szrj 	copy_phis_for_bb (bb, id);
280038fd1498Szrj 
280138fd1498Szrj   FOR_ALL_BB_FN (bb, cfun_to_copy)
280238fd1498Szrj     if (bb->aux)
280338fd1498Szrj       {
280438fd1498Szrj 	if (need_debug_cleanup
280538fd1498Szrj 	    && bb->index != ENTRY_BLOCK
280638fd1498Szrj 	    && bb->index != EXIT_BLOCK)
280738fd1498Szrj 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
280838fd1498Szrj 	/* Update call edge destinations.  This can not be done before loop
280938fd1498Szrj 	   info is updated, because we may split basic blocks.  */
281038fd1498Szrj 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
281138fd1498Szrj 	    && bb->index != ENTRY_BLOCK
281238fd1498Szrj 	    && bb->index != EXIT_BLOCK)
281338fd1498Szrj 	  redirect_all_calls (id, (basic_block)bb->aux);
281438fd1498Szrj 	((basic_block)bb->aux)->aux = NULL;
281538fd1498Szrj 	bb->aux = NULL;
281638fd1498Szrj       }
281738fd1498Szrj 
281838fd1498Szrj   /* Zero out AUX fields of newly created block during EH edge
281938fd1498Szrj      insertion. */
282038fd1498Szrj   for (; last < last_basic_block_for_fn (cfun); last++)
282138fd1498Szrj     {
282238fd1498Szrj       if (need_debug_cleanup)
282338fd1498Szrj 	maybe_move_debug_stmts_to_successors (id,
282438fd1498Szrj 					      BASIC_BLOCK_FOR_FN (cfun, last));
282538fd1498Szrj       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
282638fd1498Szrj       /* Update call edge destinations.  This can not be done before loop
282738fd1498Szrj 	 info is updated, because we may split basic blocks.  */
282838fd1498Szrj       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
282938fd1498Szrj 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
283038fd1498Szrj     }
283138fd1498Szrj   entry_block_map->aux = NULL;
283238fd1498Szrj   exit_block_map->aux = NULL;
283338fd1498Szrj 
283438fd1498Szrj   if (id->eh_map)
283538fd1498Szrj     {
283638fd1498Szrj       delete id->eh_map;
283738fd1498Szrj       id->eh_map = NULL;
283838fd1498Szrj     }
283938fd1498Szrj   if (id->dependence_map)
284038fd1498Szrj     {
284138fd1498Szrj       delete id->dependence_map;
284238fd1498Szrj       id->dependence_map = NULL;
284338fd1498Szrj     }
284438fd1498Szrj 
284538fd1498Szrj   return new_fndecl;
284638fd1498Szrj }
284738fd1498Szrj 
284838fd1498Szrj /* Copy the debug STMT using ID.  We deal with these statements in a
284938fd1498Szrj    special way: if any variable in their VALUE expression wasn't
285038fd1498Szrj    remapped yet, we won't remap it, because that would get decl uids
285138fd1498Szrj    out of sync, causing codegen differences between -g and -g0.  If
285238fd1498Szrj    this arises, we drop the VALUE expression altogether.  */
285338fd1498Szrj 
static void
copy_debug_stmt (gdebug *stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  /* Remap the lexical block the stmt belongs to; if it was not mapped,
     fall back to the block the inlined body was placed into.  */
  if (gimple_block (stmt))
    {
      n = id->decl_map->get (gimple_block (stmt));
      gimple_set_block (stmt, n ? *n : id->block);
    }

  /* Nonbind markers carry no operands; the block remap above is all
     they need.  */
  if (gimple_debug_nonbind_marker_p (stmt))
    return;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  /* Tell remap_gimple_op_r we are inside a debug stmt, so failed
     remappings set this negative instead of being hard errors.  */
  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else if (gimple_debug_bind_p (stmt))
    t = gimple_debug_bind_get_var (stmt);
  else
    gcc_unreachable ();

  /* For parameters with an entry in the debug map, use the recorded
     replacement VAR_DECL directly.  */
  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = id->debug_map->get (t)))
    {
      gcc_assert (VAR_P (*n));
      t = *n;
    }
  else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
	gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      /* When inlining and source bind refers to one of the optimized
	 away parameters, change the source bind into normal debug bind
	 referring to the corresponding DEBUG_EXPR_DECL that should have
	 been bound before the call stmt.  */
      t = gimple_debug_source_bind_get_value (stmt);
      if (t != NULL_TREE
	  && TREE_CODE (t) == PARM_DECL
	  && id->call_stmt)
	{
	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
	  unsigned int i;
	  if (debug_args != NULL)
	    {
	      /* DEBUG_ARGS is a flat vector of (origin decl, debug expr)
		 pairs; search for the pair matching this parameter.  */
	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
		if ((**debug_args)[i] == DECL_ORIGIN (t)
		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
		  {
		    t = (**debug_args)[i + 1];
		    /* Flip the stmt kind from source bind to plain bind.  */
		    stmt->subcode = GIMPLE_DEBUG_BIND;
		    gimple_debug_bind_set_value (stmt, t);
		    break;
		  }
	    }
	}
      /* If the stmt is still a source bind (it was not rewritten above),
	 remap its value too.  */
      if (gimple_debug_source_bind_p (stmt))
	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
}
294238fd1498Szrj /* Process deferred debug stmts.  In order to give values better odds
294338fd1498Szrj    of being successfully remapped, we delay the processing of debug
294438fd1498Szrj    stmts until all other stmts that might require remapping are
294538fd1498Szrj    processed.  */
294638fd1498Szrj 
294738fd1498Szrj static void
copy_debug_stmts(copy_body_data * id)294838fd1498Szrj copy_debug_stmts (copy_body_data *id)
294938fd1498Szrj {
295038fd1498Szrj   size_t i;
295138fd1498Szrj   gdebug *stmt;
295238fd1498Szrj 
295338fd1498Szrj   if (!id->debug_stmts.exists ())
295438fd1498Szrj     return;
295538fd1498Szrj 
295638fd1498Szrj   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
295738fd1498Szrj     copy_debug_stmt (stmt, id);
295838fd1498Szrj 
295938fd1498Szrj   id->debug_stmts.release ();
296038fd1498Szrj }
296138fd1498Szrj 
296238fd1498Szrj /* Make a copy of the body of SRC_FN so that it can be inserted inline in
296338fd1498Szrj    another function.  */
296438fd1498Szrj 
296538fd1498Szrj static tree
copy_tree_body(copy_body_data * id)296638fd1498Szrj copy_tree_body (copy_body_data *id)
296738fd1498Szrj {
296838fd1498Szrj   tree fndecl = id->src_fn;
296938fd1498Szrj   tree body = DECL_SAVED_TREE (fndecl);
297038fd1498Szrj 
297138fd1498Szrj   walk_tree (&body, copy_tree_body_r, id, NULL);
297238fd1498Szrj 
297338fd1498Szrj   return body;
297438fd1498Szrj }
297538fd1498Szrj 
297638fd1498Szrj /* Make a copy of the body of FN so that it can be inserted inline in
297738fd1498Szrj    another function.  */
297838fd1498Szrj 
297938fd1498Szrj static tree
copy_body(copy_body_data * id,basic_block entry_block_map,basic_block exit_block_map,basic_block new_entry)298038fd1498Szrj copy_body (copy_body_data *id,
298138fd1498Szrj 	   basic_block entry_block_map, basic_block exit_block_map,
298238fd1498Szrj 	   basic_block new_entry)
298338fd1498Szrj {
298438fd1498Szrj   tree fndecl = id->src_fn;
298538fd1498Szrj   tree body;
298638fd1498Szrj 
298738fd1498Szrj   /* If this body has a CFG, walk CFG and copy.  */
298838fd1498Szrj   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
298938fd1498Szrj   body = copy_cfg_body (id, entry_block_map, exit_block_map,
299038fd1498Szrj 			new_entry);
299138fd1498Szrj   copy_debug_stmts (id);
299238fd1498Szrj 
299338fd1498Szrj   return body;
299438fd1498Szrj }
299538fd1498Szrj 
299638fd1498Szrj /* Return true if VALUE is an ADDR_EXPR of an automatic variable
299738fd1498Szrj    defined in function FN, or of a data member thereof.  */
299838fd1498Szrj 
299938fd1498Szrj static bool
self_inlining_addr_expr(tree value,tree fn)300038fd1498Szrj self_inlining_addr_expr (tree value, tree fn)
300138fd1498Szrj {
300238fd1498Szrj   tree var;
300338fd1498Szrj 
300438fd1498Szrj   if (TREE_CODE (value) != ADDR_EXPR)
300538fd1498Szrj     return false;
300638fd1498Szrj 
300738fd1498Szrj   var = get_base_address (TREE_OPERAND (value, 0));
300838fd1498Szrj 
300938fd1498Szrj   return var && auto_var_in_fn_p (var, fn);
301038fd1498Szrj }
301138fd1498Szrj 
301238fd1498Szrj /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
301338fd1498Szrj    lexical block and line number information from base_stmt, if given,
301438fd1498Szrj    or from the last stmt of the block otherwise.  */
301538fd1498Szrj 
static gimple *
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple *base_stmt)
{
  gimple *note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  /* Debug binds only make sense once the source body is in SSA form.  */
  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  /* Skip when variable-tracking assignments are disabled for the
     destination function.  */
  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return NULL;

  /* VAR may not be trackable at all; then there is nothing to bind.  */
  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      /* Default to inheriting location info from the last stmt of BB.  */
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);

  if (bb)
    {
      /* Append the bind at the end of BB, handling the empty-block case
	 (GSI still points where gsi_last_bb left it).  */
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}
305438fd1498Szrj 
/* Insert INIT_STMT, which initializes a copied parameter, at the end of
   BB, fixing up its operands as needed, and emit a matching debug bind
   for the initialized decl.  */

static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
         from a rhs with a conversion.  Handle that here by forcing the
	 rhs into a temporary.  gimple_regimplify_operands is not
	 prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  /* Rebuild the unary RHS as a tree and gimplify it into a
	     temporary inserted at SI; then make INIT_STMT copy from
	     that temporary instead.  */
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     gimple_expr_type (init_stmt),
			     gimple_assign_rhs1 (init_stmt));
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);

      /* For a real (non-debug) initialization, also bind the
	 initialized decl to its own value for debug info.  */
      if (!is_gimple_debug (init_stmt))
	{
	  tree def = gimple_assign_lhs (init_stmt);
	  insert_init_debug_bind (id, bb, def, def, init_stmt);
	}
    }
}
309138fd1498Szrj 
309238fd1498Szrj /* Initialize parameter P with VALUE.  If needed, produce init statement
309338fd1498Szrj    at the end of BB.  When BB is NULL, we return init statement to be
309438fd1498Szrj    output later.  */
static gimple *
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple *init_stmt = NULL;
  tree var;
  tree rhs = value;
  /* The SSA default definition of P in the source function, if any;
     it stands for all uses of the never-assigned parameter.  */
  tree def = (gimple_in_ssa_p (cfun)
	      ? ssa_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (TREE_TYPE (p), value))
	rhs = fold_convert (TREE_TYPE (p), value);
      else
	{
	  /* ???  For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
	     GIMPLE to the following passes.  */
	  if (!is_gimple_reg_type (TREE_TYPE (value))
	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
	  else
	    rhs = build_zero_cst (TREE_TYPE (p));
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when operand is not really constant.
	 It is not big deal to prohibit constant propagation here as
	 we will constant propagate in DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && useless_type_conversion_p (TREE_TYPE (p),
						 TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  /* Substitute the argument value directly; VAR only survives
	     for debug info purposes.  */
	  insert_decl_map (id, p, value);
	  insert_debug_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to function.

     We need to construct map for the variable anyway as it might be used
     in different SSA names when parameter is set in function.

     Do replacement at -O0 for const arguments replaced by constant.
     This is important for builtin_constant_p and other construct requiring
     constant argument to be visible in inlined function body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
          || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of argument is never used, don't care about initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  /* Map P to VAR anyway so uses remap consistently, but emit
	     no initialization for the erroneous value.  */
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form properly remap the default definition
         or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  if (def)
	    {
	      def = remap_ssa_name (def, id);
	      init_stmt = gimple_build_assign (def, rhs);
	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	      set_ssa_default_def (cfun, var, NULL);
	    }
	  else if (!optimize)
	    {
	      def = make_ssa_name (var);
	      init_stmt = gimple_build_assign (def, rhs);
	    }
	}
      else
        init_stmt = gimple_build_assign (var, rhs);

      /* With no BB the init stmt is returned for the caller to emit.  */
      if (bb && init_stmt)
        insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}
325238fd1498Szrj 
325338fd1498Szrj /* Generate code to initialize the parameters of the function at the
325438fd1498Szrj    top of the stack in ID from the GIMPLE_CALL STMT.  */
325538fd1498Szrj 
static void
initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
			       tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  /* Accumulates the replacement VAR_DECLs created for FN's parameters;
     declared into ID->block at the end.  */
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      /* A mismatched call may pass fewer arguments than FN declares;
	 setup_one_parameter copes with a NULL value.  */
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = id->decl_map->get (p);
      if (varp && VAR_P (*varp))
	{
	  /* DEF is P's default definition SSA name in the source
	     function, if P is a register in SSA form.  */
	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
		      ? ssa_default_def (id->src_cfun, p) : NULL);
	  tree var = *varp;
	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
	  /* Also remap the default definition if it was remapped
	     to the default definition of the parameter replacement
	     by the parameter setup.  */
	  if (def)
	    {
	      tree *defp = id->decl_map->get (def);
	      if (defp
		  && TREE_CODE (*defp) == SSA_NAME
		  && SSA_NAME_VAR (*defp) == var)
		TREE_TYPE (*defp) = TREE_TYPE (var);
	    }
	}
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
331738fd1498Szrj 
331838fd1498Szrj 
331938fd1498Szrj /* Declare a return variable to replace the RESULT_DECL for the
332038fd1498Szrj    function we are calling.  An appropriate DECL_STMT is returned.
332138fd1498Szrj    The USE_STMT is filled to contain a use of the declaration to
332238fd1498Szrj    indicate the return value of the function.
332338fd1498Szrj 
332438fd1498Szrj    RETURN_SLOT, if non-null is place where to store the result.  It
332538fd1498Szrj    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
332638fd1498Szrj    was the LHS of the MODIFY_EXPR to which this call is the RHS.
332738fd1498Szrj 
332838fd1498Szrj    RETURN_BOUNDS holds a destination for returned bounds.
332938fd1498Szrj 
333038fd1498Szrj    The return value is a (possibly null) value that holds the result
333138fd1498Szrj    as seen by the caller.  */
333238fd1498Szrj 
static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
			 tree return_bounds, basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  /* VAR is the replacement registered for RESULT; USE is the value the
     caller sees (may differ from VAR when conversions are needed).  */
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree return_slot_addr = build_fold_addr_expr (return_slot);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check possibly can match, because values returned
	     via return slot optimization are not believed to have address
	     taken by alias analysis.  */
	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
	  var = return_slot_addr;
	}
      else
	{
	  var = return_slot;
	  gcc_assert (TREE_CODE (var) != SSA_NAME);
	  if (TREE_ADDRESSABLE (result))
	    mark_addressable (var);
	}
      /* Propagate the "not a gimple register" property of complex or
	 vector results to the slot so SSA rewriting leaves it alone.  */
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
	  && !DECL_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
		   && !DECL_GIMPLE_REG_P (result)
		   && DECL_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  /* Fall back to a fresh temporary standing in for RESULT.  */
  var = copy_result_decl_to_var (result, id);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (caller_type, var))
	use = fold_convert (caller_type, var);
      else
	{
	  /* ???  For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a MEM_REF to not leak invalid GIMPLE to the following
	     passes.  */
	  /* Prevent var from being written into SSA form.  */
	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
	    DECL_GIMPLE_REG_P (var) = false;
	  else if (is_gimple_reg_type (TREE_TYPE (var)))
	    TREE_ADDRESSABLE (var) = true;
	  use = fold_build2 (MEM_REF, caller_type,
			     build_fold_addr_expr (var),
			     build_int_cst (ptr_type_node, 0));
	}
    }

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to remap and initialize
	 its default_def SSA_NAME.  */
      if (gimple_in_ssa_p (id->src_cfun)
	  && is_gimple_reg (result))
	{
	  temp = make_ssa_name (temp);
	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
	}
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  /* If returned bounds are used, then make var for them.  */
  if (return_bounds)
  {
    tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
    DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
    TREE_NO_WARNING (bndtemp) = 1;
    declare_inline_vars (id->block, bndtemp);

    id->retbnd = bndtemp;
    insert_init_stmt (id, entry_bb,
		      gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
  }

  return use;
}
353038fd1498Szrj 
353138fd1498Szrj /* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */
353338fd1498Szrj 
353438fd1498Szrj const char *
copy_forbidden(struct function * fun)353538fd1498Szrj copy_forbidden (struct function *fun)
353638fd1498Szrj {
353738fd1498Szrj   const char *reason = fun->cannot_be_copied_reason;
353838fd1498Szrj 
353938fd1498Szrj   /* Only examine the function once.  */
354038fd1498Szrj   if (fun->cannot_be_copied_set)
354138fd1498Szrj     return reason;
354238fd1498Szrj 
354338fd1498Szrj   /* We cannot copy a function that receives a non-local goto
354438fd1498Szrj      because we cannot remap the destination label used in the
354538fd1498Szrj      function that is performing the non-local goto.  */
354638fd1498Szrj   /* ??? Actually, this should be possible, if we work at it.
354738fd1498Szrj      No doubt there's just a handful of places that simply
354838fd1498Szrj      assume it doesn't happen and don't substitute properly.  */
354938fd1498Szrj   if (fun->has_nonlocal_label)
355038fd1498Szrj     {
355138fd1498Szrj       reason = G_("function %q+F can never be copied "
355238fd1498Szrj 		  "because it receives a non-local goto");
355338fd1498Szrj       goto fail;
355438fd1498Szrj     }
355538fd1498Szrj 
355638fd1498Szrj   if (fun->has_forced_label_in_static)
355738fd1498Szrj     {
355838fd1498Szrj       reason = G_("function %q+F can never be copied because it saves "
355938fd1498Szrj 		  "address of local label in a static variable");
356038fd1498Szrj       goto fail;
356138fd1498Szrj     }
356238fd1498Szrj 
356338fd1498Szrj  fail:
356438fd1498Szrj   fun->cannot_be_copied_reason = reason;
356538fd1498Szrj   fun->cannot_be_copied_set = true;
356638fd1498Szrj   return reason;
356738fd1498Szrj }
356838fd1498Szrj 
356938fd1498Szrj 
/* Set by inline_forbidden_p_stmt (and copy_forbidden via
   inline_forbidden_p) to a translatable diagnostic format string
   explaining why the function last examined cannot be inlined;
   consumed by tree_inlinable_function_p for error/warning output.  */
static const char *inline_forbidden_reason;
357138fd1498Szrj 
357238fd1498Szrj /* A callback for walk_gimple_seq to handle statements.  Returns non-null
   iff a function cannot be inlined.  Also sets the reason why. */
357438fd1498Szrj 
static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			 struct walk_stmt_info *wip)
{
  /* The function being analyzed, stashed in the walk info by
     inline_forbidden_p; returned (or another non-NULL tree) to stop
     the walk when an inline-blocking construct is found.  */
  tree fn = (tree) wip->info;
  tree t;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline alloca call unless user explicitly forced so as
	 this may change program's memory overhead drastically when the
	 function using alloca is called in loop.  In GCC present in
	 SPEC2000 inlining into schedule_block cause it to require 2GB of
	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
	 VLA objects as those can't cause unbounded growth (they're always
	 wrapped inside stack_save/stack_restore regions.  */
      if (gimple_maybe_alloca_call_p (stmt)
	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  *handled_ops_p = true;
	  return fn;
	}

      /* Indirect calls (no fndecl) cannot be analyzed further here.  */
      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses setjmp");
	  *handled_ops_p = true;
	  return t;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because it "
		   "uses variable argument lists");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses non-local goto");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses __builtin_return or __builtin_apply_args");
	    *handled_ops_p = true;
	    return t;

	  default:
	    break;
	  }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  *handled_ops_p = true;
	  return t;
	}
      break;

    default:
      break;
    }

  /* Nothing forbidding found in this statement; continue the walk.  */
  *handled_ops_p = false;
  return NULL_TREE;
}
369238fd1498Szrj 
369338fd1498Szrj /* Return true if FNDECL is a function that cannot be inlined into
369438fd1498Szrj    another one.  */
369538fd1498Szrj 
369638fd1498Szrj static bool
inline_forbidden_p(tree fndecl)369738fd1498Szrj inline_forbidden_p (tree fndecl)
369838fd1498Szrj {
369938fd1498Szrj   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
370038fd1498Szrj   struct walk_stmt_info wi;
370138fd1498Szrj   basic_block bb;
370238fd1498Szrj   bool forbidden_p = false;
370338fd1498Szrj 
370438fd1498Szrj   /* First check for shared reasons not to copy the code.  */
370538fd1498Szrj   inline_forbidden_reason = copy_forbidden (fun);
370638fd1498Szrj   if (inline_forbidden_reason != NULL)
370738fd1498Szrj     return true;
370838fd1498Szrj 
370938fd1498Szrj   /* Next, walk the statements of the function looking for
371038fd1498Szrj      constraucts we can't handle, or are non-optimal for inlining.  */
371138fd1498Szrj   hash_set<tree> visited_nodes;
371238fd1498Szrj   memset (&wi, 0, sizeof (wi));
371338fd1498Szrj   wi.info = (void *) fndecl;
371438fd1498Szrj   wi.pset = &visited_nodes;
371538fd1498Szrj 
371638fd1498Szrj   FOR_EACH_BB_FN (bb, fun)
371738fd1498Szrj     {
371838fd1498Szrj       gimple *ret;
371938fd1498Szrj       gimple_seq seq = bb_seq (bb);
372038fd1498Szrj       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
372138fd1498Szrj       forbidden_p = (ret != NULL);
372238fd1498Szrj       if (forbidden_p)
372338fd1498Szrj 	break;
372438fd1498Szrj     }
372538fd1498Szrj 
372638fd1498Szrj   return forbidden_p;
372738fd1498Szrj }
372838fd1498Szrj 
372938fd1498Szrj /* Return false if the function FNDECL cannot be inlined on account of its
373038fd1498Szrj    attributes, true otherwise.  */
373138fd1498Szrj static bool
function_attribute_inlinable_p(const_tree fndecl)373238fd1498Szrj function_attribute_inlinable_p (const_tree fndecl)
373338fd1498Szrj {
373438fd1498Szrj   if (targetm.attribute_table)
373538fd1498Szrj     {
373638fd1498Szrj       const_tree a;
373738fd1498Szrj 
373838fd1498Szrj       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
373938fd1498Szrj 	{
374038fd1498Szrj 	  const_tree name = TREE_PURPOSE (a);
374138fd1498Szrj 	  int i;
374238fd1498Szrj 
374338fd1498Szrj 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
374438fd1498Szrj 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
374538fd1498Szrj 	      return targetm.function_attribute_inlinable_p (fndecl);
374638fd1498Szrj 	}
374738fd1498Szrj     }
374838fd1498Szrj 
374938fd1498Szrj   return true;
375038fd1498Szrj }
375138fd1498Szrj 
375238fd1498Szrj /* Returns nonzero if FN is a function that does not have any
375338fd1498Szrj    fundamental inline blocking properties.  */
375438fd1498Szrj 
375538fd1498Szrj bool
tree_inlinable_function_p(tree fn)375638fd1498Szrj tree_inlinable_function_p (tree fn)
375738fd1498Szrj {
375838fd1498Szrj   bool inlinable = true;
375938fd1498Szrj   bool do_warning;
376038fd1498Szrj   tree always_inline;
376138fd1498Szrj 
376238fd1498Szrj   /* If we've already decided this function shouldn't be inlined,
376338fd1498Szrj      there's no need to check again.  */
376438fd1498Szrj   if (DECL_UNINLINABLE (fn))
376538fd1498Szrj     return false;
376638fd1498Szrj 
376738fd1498Szrj   /* We only warn for functions declared `inline' by the user.  */
376838fd1498Szrj   do_warning = (warn_inline
376938fd1498Szrj 		&& DECL_DECLARED_INLINE_P (fn)
377038fd1498Szrj 		&& !DECL_NO_INLINE_WARNING_P (fn)
377138fd1498Szrj 		&& !DECL_IN_SYSTEM_HEADER (fn));
377238fd1498Szrj 
377338fd1498Szrj   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
377438fd1498Szrj 
377538fd1498Szrj   if (flag_no_inline
377638fd1498Szrj       && always_inline == NULL)
377738fd1498Szrj     {
377838fd1498Szrj       if (do_warning)
377938fd1498Szrj         warning (OPT_Winline, "function %q+F can never be inlined because it "
378038fd1498Szrj                  "is suppressed using -fno-inline", fn);
378138fd1498Szrj       inlinable = false;
378238fd1498Szrj     }
378338fd1498Szrj 
378438fd1498Szrj   else if (!function_attribute_inlinable_p (fn))
378538fd1498Szrj     {
378638fd1498Szrj       if (do_warning)
378738fd1498Szrj         warning (OPT_Winline, "function %q+F can never be inlined because it "
378838fd1498Szrj                  "uses attributes conflicting with inlining", fn);
378938fd1498Szrj       inlinable = false;
379038fd1498Szrj     }
379138fd1498Szrj 
379238fd1498Szrj   else if (inline_forbidden_p (fn))
379338fd1498Szrj     {
379438fd1498Szrj       /* See if we should warn about uninlinable functions.  Previously,
379538fd1498Szrj 	 some of these warnings would be issued while trying to expand
379638fd1498Szrj 	 the function inline, but that would cause multiple warnings
379738fd1498Szrj 	 about functions that would for example call alloca.  But since
379838fd1498Szrj 	 this a property of the function, just one warning is enough.
379938fd1498Szrj 	 As a bonus we can now give more details about the reason why a
380038fd1498Szrj 	 function is not inlinable.  */
380138fd1498Szrj       if (always_inline)
380238fd1498Szrj 	error (inline_forbidden_reason, fn);
380338fd1498Szrj       else if (do_warning)
380438fd1498Szrj 	warning (OPT_Winline, inline_forbidden_reason, fn);
380538fd1498Szrj 
380638fd1498Szrj       inlinable = false;
380738fd1498Szrj     }
380838fd1498Szrj 
380938fd1498Szrj   /* Squirrel away the result so that we don't have to check again.  */
381038fd1498Szrj   DECL_UNINLINABLE (fn) = !inlinable;
381138fd1498Szrj 
381238fd1498Szrj   return inlinable;
381338fd1498Szrj }
381438fd1498Szrj 
381538fd1498Szrj /* Estimate the cost of a memory move of type TYPE.  Use machine dependent
381638fd1498Szrj    word size and take possible memcpy call into account and return
381738fd1498Szrj    cost based on whether optimizing for size or speed according to SPEED_P.  */
381838fd1498Szrj 
381938fd1498Szrj int
estimate_move_cost(tree type,bool ARG_UNUSED (speed_p))382038fd1498Szrj estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
382138fd1498Szrj {
382238fd1498Szrj   HOST_WIDE_INT size;
382338fd1498Szrj 
382438fd1498Szrj   gcc_assert (!VOID_TYPE_P (type));
382538fd1498Szrj 
382638fd1498Szrj   if (TREE_CODE (type) == VECTOR_TYPE)
382738fd1498Szrj     {
382838fd1498Szrj       scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
382938fd1498Szrj       machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
383038fd1498Szrj       int orig_mode_size
383138fd1498Szrj 	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
383238fd1498Szrj       int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
383338fd1498Szrj       return ((orig_mode_size + simd_mode_size - 1)
383438fd1498Szrj 	      / simd_mode_size);
383538fd1498Szrj     }
383638fd1498Szrj 
383738fd1498Szrj   size = int_size_in_bytes (type);
383838fd1498Szrj 
383938fd1498Szrj   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
384038fd1498Szrj     /* Cost of a memcpy call, 3 arguments and the call.  */
384138fd1498Szrj     return 4;
384238fd1498Szrj   else
384338fd1498Szrj     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
384438fd1498Szrj }
384538fd1498Szrj 
384638fd1498Szrj /* Returns cost of operation CODE, according to WEIGHTS  */
384738fd1498Szrj 
static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
			tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
    case VIEW_CONVERT_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case FMA_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case WIDEN_LSHIFT_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:

      return 1;

    /* Few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      /* Division by a constant is typically strength-reduced, so only
	 charge the expensive div/mod weight for non-constant divisors.  */
      if (TREE_CODE (op2) != INTEGER_CST)
        return weights->div_mod_cost;
      return 1;

    /* Bit-field insertion needs several shift and mask operations.  */
    case BIT_INSERT_EXPR:
      return 3;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
398138fd1498Szrj 
398238fd1498Szrj 
398338fd1498Szrj /* Estimate number of instructions that will be created by expanding
398438fd1498Szrj    the statements in the statement sequence STMTS.
398538fd1498Szrj    WEIGHTS contains weights attributed to various constructs.  */
398638fd1498Szrj 
398738fd1498Szrj int
estimate_num_insns_seq(gimple_seq stmts,eni_weights * weights)398838fd1498Szrj estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
398938fd1498Szrj {
399038fd1498Szrj   int cost;
399138fd1498Szrj   gimple_stmt_iterator gsi;
399238fd1498Szrj 
399338fd1498Szrj   cost = 0;
399438fd1498Szrj   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
399538fd1498Szrj     cost += estimate_num_insns (gsi_stmt (gsi), weights);
399638fd1498Szrj 
399738fd1498Szrj   return cost;
399838fd1498Szrj }
399938fd1498Szrj 
400038fd1498Szrj 
/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs.  Statements
   that expand to nothing (labels, debug stmts, ...) cost 0; sub-statement
   sequences (binds, try blocks, OMP bodies) are costed recursively.  */

int
estimate_num_insns (gimple *stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have three cases to
	 deal with:
	 1) Simple assignments to registers;
	 2) Stores to things that must live in memory.  This includes
	    "normal" stores to scalars, but also assignments of large
	    structures, or constructors of big arrays;

	 Let us look at the first two cases, assuming we have "a = b + C":
	 <GIMPLE_ASSIGN <var_decl "a">
	        <plus_expr <var_decl "b"> <constant C>>
	 If "a" is a GIMPLE register, the assignment to it is free on almost
	 any target, because "a" usually ends up in a real register.  Hence
	 the only cost of this expression comes from the PLUS_EXPR, and we
	 can ignore the GIMPLE_ASSIGN.
	 If "a" is not a GIMPLE register, the assignment to "a" will most
	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
	 of moving something into "a", which we compute using the function
	 estimate_move_cost.  */
      if (gimple_clobber_p (stmt))
	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      cost = 0;

      /* Account for the cost of moving to / from memory.  */
      if (gimple_store_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
      if (gimple_assign_load_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);

      /* Add the cost of the RHS operator; only binary RHS codes have a
	 second operand to pass down.  */
      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
      				      gimple_assign_rhs1 (stmt),
				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
				      == GIMPLE_BINARY_RHS
				      ? gimple_assign_rhs2 (stmt) : NULL);
      break;

    case GIMPLE_COND:
      /* One jump plus the cost of evaluating the comparison.  */
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
      				         gimple_op (stmt, 0),
				         gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	/* Take into account cost of the switch + guess 2 conditional jumps for
	   each case label.

	   TODO: once the switch expansion logic is sufficiently separated, we can
	   do better job on estimating cost of the switch.  */
	if (weights->time_based)
	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
	else
	  cost = gimple_switch_num_labels (switch_stmt) * 2;
      }
      break;

    case GIMPLE_CALL:
      {
	tree decl;

	if (gimple_call_internal_p (stmt))
	  return 0;
	else if ((decl = gimple_call_fndecl (stmt))
		 && DECL_BUILT_IN (decl))
	  {
	    /* Do not special case builtins where we see the body.
	       This just confuses the inliner.  */
	    struct cgraph_node *node;
	    if (!(node = cgraph_node::get (decl))
		|| node->definition)
	      ;
	    /* For builtins that are likely expanded to nothing or
	       inlined do not account operand costs.  */
	    else if (is_simple_builtin (decl))
	      return 0;
	    else if (is_inexpensive_builtin (decl))
	      return weights->target_builtin_call_cost;
	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	      {
		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
		   specialize the cheap expansion we do here.
		   ???  This asks for a more general solution.  */
		switch (DECL_FUNCTION_CODE (decl))
		  {
		    case BUILT_IN_POW:
		    case BUILT_IN_POWF:
		    case BUILT_IN_POWL:
		      /* pow (x, 2.0) expands to a single multiply; cost it
			 as such.  */
		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
			  && (real_equal
			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
			       &dconst2)))
			return estimate_operator_cost
			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
			     gimple_call_arg (stmt, 0));
		      break;

		    default:
		      break;
		  }
	      }
	  }

	/* Base call cost, plus the cost of moving the return value and
	   each argument to / from memory.  */
	cost = decl ? weights->call_cost : weights->indirect_call_cost;
	if (gimple_call_lhs (stmt))
	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
				      weights->time_based);
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    cost += estimate_move_cost (TREE_TYPE (arg),
					weights->time_based);
	  }
	break;
      }

    case GIMPLE_RETURN:
      return weights->return_cost;

    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PHI:
    case GIMPLE_PREDICT:
    case GIMPLE_DEBUG:
      /* These expand to no real instructions.  */
      return 0;

    case GIMPLE_ASM:
      {
	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
	/* 1000 means infinity. This avoids overflows later
	   with very long asm statements.  */
	if (count > 1000)
	  count = 1000;
	/* If this asm is asm inline, count anything as minimum size.  */
	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
	  count = MIN (1, count);
	return MAX (1, count);
      }

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
	 argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ??? This is going to turn into a switch statement.  Ideally
	 we'd have a look at the eh region and estimate the number of
	 edges involved.  */
      return 10;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (
	       gimple_bind_body (as_a <gbind *> (stmt)),
	       weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (
				       as_a <gcatch *> (stmt)),
				     weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      /* Directive cost plus the loop body and pre-body sequences.  */
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    case GIMPLE_TRANSACTION:
      return (weights->tm_cost
	      + estimate_num_insns_seq (gimple_transaction_body (
					  as_a <gtransaction *> (stmt)),
					weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}
422938fd1498Szrj 
423038fd1498Szrj /* Estimate number of instructions that will be created by expanding
423138fd1498Szrj    function FNDECL.  WEIGHTS contains weights attributed to various
423238fd1498Szrj    constructs.  */
423338fd1498Szrj 
423438fd1498Szrj int
estimate_num_insns_fn(tree fndecl,eni_weights * weights)423538fd1498Szrj estimate_num_insns_fn (tree fndecl, eni_weights *weights)
423638fd1498Szrj {
423738fd1498Szrj   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
423838fd1498Szrj   gimple_stmt_iterator bsi;
423938fd1498Szrj   basic_block bb;
424038fd1498Szrj   int n = 0;
424138fd1498Szrj 
424238fd1498Szrj   gcc_assert (my_function && my_function->cfg);
424338fd1498Szrj   FOR_EACH_BB_FN (bb, my_function)
424438fd1498Szrj     {
424538fd1498Szrj       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
424638fd1498Szrj 	n += estimate_num_insns (gsi_stmt (bsi), weights);
424738fd1498Szrj     }
424838fd1498Szrj 
424938fd1498Szrj   return n;
425038fd1498Szrj }
425138fd1498Szrj 
425238fd1498Szrj 
425338fd1498Szrj /* Initializes weights used by estimate_num_insns.  */
425438fd1498Szrj 
425538fd1498Szrj void
init_inline_once(void)425638fd1498Szrj init_inline_once (void)
425738fd1498Szrj {
425838fd1498Szrj   eni_size_weights.call_cost = 1;
425938fd1498Szrj   eni_size_weights.indirect_call_cost = 3;
426038fd1498Szrj   eni_size_weights.target_builtin_call_cost = 1;
426138fd1498Szrj   eni_size_weights.div_mod_cost = 1;
426238fd1498Szrj   eni_size_weights.omp_cost = 40;
426338fd1498Szrj   eni_size_weights.tm_cost = 10;
426438fd1498Szrj   eni_size_weights.time_based = false;
426538fd1498Szrj   eni_size_weights.return_cost = 1;
426638fd1498Szrj 
426738fd1498Szrj   /* Estimating time for call is difficult, since we have no idea what the
426838fd1498Szrj      called function does.  In the current uses of eni_time_weights,
426938fd1498Szrj      underestimating the cost does less harm than overestimating it, so
427038fd1498Szrj      we choose a rather small value here.  */
427138fd1498Szrj   eni_time_weights.call_cost = 10;
427238fd1498Szrj   eni_time_weights.indirect_call_cost = 15;
427338fd1498Szrj   eni_time_weights.target_builtin_call_cost = 1;
427438fd1498Szrj   eni_time_weights.div_mod_cost = 10;
427538fd1498Szrj   eni_time_weights.omp_cost = 40;
427638fd1498Szrj   eni_time_weights.tm_cost = 40;
427738fd1498Szrj   eni_time_weights.time_based = true;
427838fd1498Szrj   eni_time_weights.return_cost = 2;
427938fd1498Szrj }
428038fd1498Szrj 
428138fd1498Szrj 
428238fd1498Szrj /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
428338fd1498Szrj 
428438fd1498Szrj static void
prepend_lexical_block(tree current_block,tree new_block)428538fd1498Szrj prepend_lexical_block (tree current_block, tree new_block)
428638fd1498Szrj {
428738fd1498Szrj   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
428838fd1498Szrj   BLOCK_SUBBLOCKS (current_block) = new_block;
428938fd1498Szrj   BLOCK_SUPERCONTEXT (new_block) = current_block;
429038fd1498Szrj }
429138fd1498Szrj 
429238fd1498Szrj /* Add local variables from CALLEE to CALLER.  */
429338fd1498Szrj 
429438fd1498Szrj static inline void
add_local_variables(struct function * callee,struct function * caller,copy_body_data * id)429538fd1498Szrj add_local_variables (struct function *callee, struct function *caller,
429638fd1498Szrj 		     copy_body_data *id)
429738fd1498Szrj {
429838fd1498Szrj   tree var;
429938fd1498Szrj   unsigned ix;
430038fd1498Szrj 
430138fd1498Szrj   FOR_EACH_LOCAL_DECL (callee, ix, var)
430238fd1498Szrj     if (!can_be_nonlocal (var, id))
430338fd1498Szrj       {
430438fd1498Szrj         tree new_var = remap_decl (var, id);
430538fd1498Szrj 
430638fd1498Szrj         /* Remap debug-expressions.  */
430738fd1498Szrj 	if (VAR_P (new_var)
430838fd1498Szrj 	    && DECL_HAS_DEBUG_EXPR_P (var)
430938fd1498Szrj 	    && new_var != var)
431038fd1498Szrj 	  {
431138fd1498Szrj 	    tree tem = DECL_DEBUG_EXPR (var);
431238fd1498Szrj 	    bool old_regimplify = id->regimplify;
431338fd1498Szrj 	    id->remapping_type_depth++;
431438fd1498Szrj 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
431538fd1498Szrj 	    id->remapping_type_depth--;
431638fd1498Szrj 	    id->regimplify = old_regimplify;
431738fd1498Szrj 	    SET_DECL_DEBUG_EXPR (new_var, tem);
431838fd1498Szrj 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
431938fd1498Szrj 	  }
432038fd1498Szrj 	add_local_decl (caller, new_var);
432138fd1498Szrj       }
432238fd1498Szrj }
432338fd1498Szrj 
432438fd1498Szrj /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
432538fd1498Szrj    have brought in or introduced any debug stmts for SRCVAR.  */
432638fd1498Szrj 
432738fd1498Szrj static inline void
reset_debug_binding(copy_body_data * id,tree srcvar,gimple_seq * bindings)432838fd1498Szrj reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
432938fd1498Szrj {
433038fd1498Szrj   tree *remappedvarp = id->decl_map->get (srcvar);
433138fd1498Szrj 
433238fd1498Szrj   if (!remappedvarp)
433338fd1498Szrj     return;
433438fd1498Szrj 
433538fd1498Szrj   if (!VAR_P (*remappedvarp))
433638fd1498Szrj     return;
433738fd1498Szrj 
433838fd1498Szrj   if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
433938fd1498Szrj     return;
434038fd1498Szrj 
434138fd1498Szrj   tree tvar = target_for_debug_bind (*remappedvarp);
434238fd1498Szrj   if (!tvar)
434338fd1498Szrj     return;
434438fd1498Szrj 
434538fd1498Szrj   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
434638fd1498Szrj 					  id->call_stmt);
434738fd1498Szrj   gimple_seq_add_stmt (bindings, stmt);
434838fd1498Szrj }
434938fd1498Szrj 
435038fd1498Szrj /* For each inlined variable for which we may have debug bind stmts,
435138fd1498Szrj    add before GSI a final debug stmt resetting it, marking the end of
435238fd1498Szrj    its life, so that var-tracking knows it doesn't have to compute
435338fd1498Szrj    further locations for it.  */
435438fd1498Szrj 
435538fd1498Szrj static inline void
reset_debug_bindings(copy_body_data * id,gimple_stmt_iterator gsi)435638fd1498Szrj reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
435738fd1498Szrj {
435838fd1498Szrj   tree var;
435938fd1498Szrj   unsigned ix;
436038fd1498Szrj   gimple_seq bindings = NULL;
436138fd1498Szrj 
436238fd1498Szrj   if (!gimple_in_ssa_p (id->src_cfun))
436338fd1498Szrj     return;
436438fd1498Szrj 
436538fd1498Szrj   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
436638fd1498Szrj     return;
436738fd1498Szrj 
436838fd1498Szrj   for (var = DECL_ARGUMENTS (id->src_fn);
436938fd1498Szrj        var; var = DECL_CHAIN (var))
437038fd1498Szrj     reset_debug_binding (id, var, &bindings);
437138fd1498Szrj 
437238fd1498Szrj   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
437338fd1498Szrj     reset_debug_binding (id, var, &bindings);
437438fd1498Szrj 
437538fd1498Szrj   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
437638fd1498Szrj }
437738fd1498Szrj 
437838fd1498Szrj /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
437938fd1498Szrj 
438038fd1498Szrj static bool
expand_call_inline(basic_block bb,gimple * stmt,copy_body_data * id)438138fd1498Szrj expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
438238fd1498Szrj {
438338fd1498Szrj   tree use_retvar;
438438fd1498Szrj   tree fn;
438538fd1498Szrj   hash_map<tree, tree> *dst;
438638fd1498Szrj   hash_map<tree, tree> *st = NULL;
438738fd1498Szrj   tree return_slot;
438838fd1498Szrj   tree modify_dest;
438938fd1498Szrj   tree return_bounds = NULL;
439038fd1498Szrj   struct cgraph_edge *cg_edge;
439138fd1498Szrj   cgraph_inline_failed_t reason;
439238fd1498Szrj   basic_block return_block;
439338fd1498Szrj   edge e;
439438fd1498Szrj   gimple_stmt_iterator gsi, stmt_gsi;
439538fd1498Szrj   bool successfully_inlined = false;
439638fd1498Szrj   bool purge_dead_abnormal_edges;
439738fd1498Szrj   gcall *call_stmt;
439838fd1498Szrj   unsigned int i;
439938fd1498Szrj   unsigned int prop_mask, src_properties;
440038fd1498Szrj   struct function *dst_cfun;
440138fd1498Szrj   tree simduid;
440238fd1498Szrj   use_operand_p use;
440338fd1498Szrj   gimple *simtenter_stmt = NULL;
440438fd1498Szrj   vec<tree> *simtvars_save;
440538fd1498Szrj 
440638fd1498Szrj   /* The gimplifier uses input_location in too many places, such as
440738fd1498Szrj      internal_get_tmp_var ().  */
440838fd1498Szrj   location_t saved_location = input_location;
440938fd1498Szrj   input_location = gimple_location (stmt);
441038fd1498Szrj 
441138fd1498Szrj   /* From here on, we're only interested in CALL_EXPRs.  */
441238fd1498Szrj   call_stmt = dyn_cast <gcall *> (stmt);
441338fd1498Szrj   if (!call_stmt)
441438fd1498Szrj     goto egress;
441538fd1498Szrj 
441638fd1498Szrj   cg_edge = id->dst_node->get_edge (stmt);
441738fd1498Szrj   gcc_checking_assert (cg_edge);
441838fd1498Szrj   /* First, see if we can figure out what function is being called.
441938fd1498Szrj      If we cannot, then there is no hope of inlining the function.  */
442038fd1498Szrj   if (cg_edge->indirect_unknown_callee)
442138fd1498Szrj     goto egress;
442238fd1498Szrj   fn = cg_edge->callee->decl;
442338fd1498Szrj   gcc_checking_assert (fn);
442438fd1498Szrj 
442538fd1498Szrj   /* If FN is a declaration of a function in a nested scope that was
442638fd1498Szrj      globally declared inline, we don't set its DECL_INITIAL.
442738fd1498Szrj      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
442838fd1498Szrj      C++ front-end uses it for cdtors to refer to their internal
442938fd1498Szrj      declarations, that are not real functions.  Fortunately those
443038fd1498Szrj      don't have trees to be saved, so we can tell by checking their
443138fd1498Szrj      gimple_body.  */
443238fd1498Szrj   if (!DECL_INITIAL (fn)
443338fd1498Szrj       && DECL_ABSTRACT_ORIGIN (fn)
443438fd1498Szrj       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
443538fd1498Szrj     fn = DECL_ABSTRACT_ORIGIN (fn);
443638fd1498Szrj 
443738fd1498Szrj   /* Don't try to inline functions that are not well-suited to inlining.  */
443838fd1498Szrj   if (cg_edge->inline_failed)
443938fd1498Szrj     {
444038fd1498Szrj       reason = cg_edge->inline_failed;
444138fd1498Szrj       /* If this call was originally indirect, we do not want to emit any
444238fd1498Szrj 	 inlining related warnings or sorry messages because there are no
444338fd1498Szrj 	 guarantees regarding those.  */
444438fd1498Szrj       if (cg_edge->indirect_inlining_edge)
444538fd1498Szrj 	goto egress;
444638fd1498Szrj 
444738fd1498Szrj       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
444838fd1498Szrj           /* For extern inline functions that get redefined we always
444938fd1498Szrj 	     silently ignored always_inline flag. Better behavior would
445038fd1498Szrj 	     be to be able to keep both bodies and use extern inline body
445138fd1498Szrj 	     for inlining, but we can't do that because frontends overwrite
445238fd1498Szrj 	     the body.  */
445338fd1498Szrj 	  && !cg_edge->callee->local.redefined_extern_inline
445438fd1498Szrj 	  /* During early inline pass, report only when optimization is
445538fd1498Szrj 	     not turned on.  */
445638fd1498Szrj 	  && (symtab->global_info_ready
445738fd1498Szrj 	      || !optimize
445838fd1498Szrj 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
445938fd1498Szrj 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
446038fd1498Szrj 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
446138fd1498Szrj 	{
446238fd1498Szrj 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
446338fd1498Szrj 		 cgraph_inline_failed_string (reason));
446438fd1498Szrj 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
446538fd1498Szrj 	    inform (gimple_location (stmt), "called from here");
446638fd1498Szrj 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
446738fd1498Szrj 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
446838fd1498Szrj                    "called from this function");
446938fd1498Szrj 	}
447038fd1498Szrj       else if (warn_inline
447138fd1498Szrj 	       && DECL_DECLARED_INLINE_P (fn)
447238fd1498Szrj 	       && !DECL_NO_INLINE_WARNING_P (fn)
447338fd1498Szrj 	       && !DECL_IN_SYSTEM_HEADER (fn)
447438fd1498Szrj 	       && reason != CIF_UNSPECIFIED
447538fd1498Szrj 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
447638fd1498Szrj 	       /* Do not warn about not inlined recursive calls.  */
447738fd1498Szrj 	       && !cg_edge->recursive_p ()
447838fd1498Szrj 	       /* Avoid warnings during early inline pass. */
447938fd1498Szrj 	       && symtab->global_info_ready)
448038fd1498Szrj 	{
448138fd1498Szrj 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
448238fd1498Szrj 		       fn, _(cgraph_inline_failed_string (reason))))
448338fd1498Szrj 	    {
448438fd1498Szrj 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
448538fd1498Szrj 		inform (gimple_location (stmt), "called from here");
448638fd1498Szrj 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
448738fd1498Szrj 		inform (DECL_SOURCE_LOCATION (cfun->decl),
448838fd1498Szrj                        "called from this function");
448938fd1498Szrj 	    }
449038fd1498Szrj 	}
449138fd1498Szrj       goto egress;
449238fd1498Szrj     }
449338fd1498Szrj   id->src_node = cg_edge->callee;
449438fd1498Szrj 
449538fd1498Szrj   /* If callee is thunk, all we need is to adjust the THIS pointer
449638fd1498Szrj      and redirect to function being thunked.  */
449738fd1498Szrj   if (id->src_node->thunk.thunk_p)
449838fd1498Szrj     {
449938fd1498Szrj       cgraph_edge *edge;
450038fd1498Szrj       tree virtual_offset = NULL;
450138fd1498Szrj       profile_count count = cg_edge->count;
450238fd1498Szrj       tree op;
450338fd1498Szrj       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
450438fd1498Szrj 
450538fd1498Szrj       cg_edge->remove ();
450638fd1498Szrj       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
450738fd1498Szrj 		   		           gimple_uid (stmt),
450838fd1498Szrj 				   	   profile_count::one (),
450938fd1498Szrj 					   profile_count::one (),
451038fd1498Szrj 				           true);
451138fd1498Szrj       edge->count = count;
451238fd1498Szrj       if (id->src_node->thunk.virtual_offset_p)
451338fd1498Szrj         virtual_offset = size_int (id->src_node->thunk.virtual_value);
451438fd1498Szrj       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
451538fd1498Szrj 			      NULL);
451638fd1498Szrj       gsi_insert_before (&iter, gimple_build_assign (op,
451738fd1498Szrj 						    gimple_call_arg (stmt, 0)),
451838fd1498Szrj 			 GSI_NEW_STMT);
451938fd1498Szrj       gcc_assert (id->src_node->thunk.this_adjusting);
452038fd1498Szrj       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
452138fd1498Szrj 			 virtual_offset);
452238fd1498Szrj 
452338fd1498Szrj       gimple_call_set_arg (stmt, 0, op);
452438fd1498Szrj       gimple_call_set_fndecl (stmt, edge->callee->decl);
452538fd1498Szrj       update_stmt (stmt);
452638fd1498Szrj       id->src_node->remove ();
452738fd1498Szrj       expand_call_inline (bb, stmt, id);
452838fd1498Szrj       maybe_remove_unused_call_args (cfun, stmt);
452938fd1498Szrj       return true;
453038fd1498Szrj     }
453138fd1498Szrj   fn = cg_edge->callee->decl;
453238fd1498Szrj   cg_edge->callee->get_untransformed_body ();
453338fd1498Szrj 
453438fd1498Szrj   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
453538fd1498Szrj     cg_edge->callee->verify ();
453638fd1498Szrj 
453738fd1498Szrj   /* We will be inlining this callee.  */
453838fd1498Szrj   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
453938fd1498Szrj   id->assign_stmts.create (0);
454038fd1498Szrj 
454138fd1498Szrj   /* Update the callers EH personality.  */
454238fd1498Szrj   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
454338fd1498Szrj     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
454438fd1498Szrj       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
454538fd1498Szrj 
454638fd1498Szrj   /* Split the block before the GIMPLE_CALL.  */
454738fd1498Szrj   stmt_gsi = gsi_for_stmt (stmt);
454838fd1498Szrj   gsi_prev (&stmt_gsi);
454938fd1498Szrj   e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
455038fd1498Szrj   bb = e->src;
455138fd1498Szrj   return_block = e->dest;
455238fd1498Szrj   remove_edge (e);
455338fd1498Szrj 
455438fd1498Szrj   /* If the GIMPLE_CALL was in the last statement of BB, it may have
455538fd1498Szrj      been the source of abnormal edges.  In this case, schedule
455638fd1498Szrj      the removal of dead abnormal edges.  */
455738fd1498Szrj   gsi = gsi_start_bb (return_block);
455838fd1498Szrj   gsi_next (&gsi);
455938fd1498Szrj   purge_dead_abnormal_edges = gsi_end_p (gsi);
456038fd1498Szrj 
456138fd1498Szrj   stmt_gsi = gsi_start_bb (return_block);
456238fd1498Szrj 
456338fd1498Szrj   /* Build a block containing code to initialize the arguments, the
456438fd1498Szrj      actual inline expansion of the body, and a label for the return
456538fd1498Szrj      statements within the function to jump to.  The type of the
456638fd1498Szrj      statement expression is the return type of the function call.
456738fd1498Szrj      ???  If the call does not have an associated block then we will
456838fd1498Szrj      remap all callee blocks to NULL, effectively dropping most of
456938fd1498Szrj      its debug information.  This should only happen for calls to
457038fd1498Szrj      artificial decls inserted by the compiler itself.  We need to
457138fd1498Szrj      either link the inlined blocks into the caller block tree or
457238fd1498Szrj      not refer to them in any way to not break GC for locations.  */
457338fd1498Szrj   if (gimple_block (stmt))
457438fd1498Szrj     {
457558e805e6Szrj       /* We do want to assign a not UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
457658e805e6Szrj          to make inlined_function_outer_scope_p return true on this BLOCK.  */
457758e805e6Szrj       location_t loc = LOCATION_LOCUS (gimple_location (stmt));
457858e805e6Szrj       if (loc == UNKNOWN_LOCATION)
457958e805e6Szrj 	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
458058e805e6Szrj       if (loc == UNKNOWN_LOCATION)
458158e805e6Szrj 	loc = BUILTINS_LOCATION;
458238fd1498Szrj       id->block = make_node (BLOCK);
458338fd1498Szrj       BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
458458e805e6Szrj       BLOCK_SOURCE_LOCATION (id->block) = loc;
458538fd1498Szrj       prepend_lexical_block (gimple_block (stmt), id->block);
458638fd1498Szrj     }
458738fd1498Szrj 
458838fd1498Szrj   /* Local declarations will be replaced by their equivalents in this
458938fd1498Szrj      map.  */
459038fd1498Szrj   st = id->decl_map;
459138fd1498Szrj   id->decl_map = new hash_map<tree, tree>;
459238fd1498Szrj   dst = id->debug_map;
459338fd1498Szrj   id->debug_map = NULL;
459438fd1498Szrj 
459538fd1498Szrj   /* Record the function we are about to inline.  */
459638fd1498Szrj   id->src_fn = fn;
459738fd1498Szrj   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
459838fd1498Szrj   id->call_stmt = call_stmt;
459938fd1498Szrj 
460038fd1498Szrj   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
460138fd1498Szrj      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
460238fd1498Szrj   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
460338fd1498Szrj   simtvars_save = id->dst_simt_vars;
460438fd1498Szrj   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
460538fd1498Szrj       && (simduid = bb->loop_father->simduid) != NULL_TREE
460638fd1498Szrj       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
460738fd1498Szrj       && single_imm_use (simduid, &use, &simtenter_stmt)
460838fd1498Szrj       && is_gimple_call (simtenter_stmt)
460938fd1498Szrj       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
461038fd1498Szrj     vec_alloc (id->dst_simt_vars, 0);
461138fd1498Szrj   else
461238fd1498Szrj     id->dst_simt_vars = NULL;
461338fd1498Szrj 
461438fd1498Szrj   if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
461538fd1498Szrj     profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
461638fd1498Szrj 
461738fd1498Szrj   /* If the src function contains an IFN_VA_ARG, then so will the dst
461838fd1498Szrj      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
461938fd1498Szrj   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
462038fd1498Szrj   src_properties = id->src_cfun->curr_properties & prop_mask;
462138fd1498Szrj   if (src_properties != prop_mask)
462238fd1498Szrj     dst_cfun->curr_properties &= src_properties | ~prop_mask;
462338fd1498Szrj 
462438fd1498Szrj   gcc_assert (!id->src_cfun->after_inlining);
462538fd1498Szrj 
462638fd1498Szrj   id->entry_bb = bb;
462738fd1498Szrj   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
462838fd1498Szrj     {
462938fd1498Szrj       gimple_stmt_iterator si = gsi_last_bb (bb);
463038fd1498Szrj       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
463138fd1498Szrj       						   NOT_TAKEN),
463238fd1498Szrj 			GSI_NEW_STMT);
463338fd1498Szrj     }
463438fd1498Szrj   initialize_inlined_parameters (id, stmt, fn, bb);
463538fd1498Szrj   if (debug_nonbind_markers_p && debug_inline_points && id->block
463638fd1498Szrj       && inlined_function_outer_scope_p (id->block))
463738fd1498Szrj     {
463838fd1498Szrj       gimple_stmt_iterator si = gsi_last_bb (bb);
463938fd1498Szrj       gsi_insert_after (&si, gimple_build_debug_inline_entry
464038fd1498Szrj 			(id->block, input_location), GSI_NEW_STMT);
464138fd1498Szrj     }
464238fd1498Szrj 
464338fd1498Szrj   if (DECL_INITIAL (fn))
464438fd1498Szrj     {
464538fd1498Szrj       if (gimple_block (stmt))
464638fd1498Szrj 	{
464738fd1498Szrj 	  tree *var;
464838fd1498Szrj 
464938fd1498Szrj 	  prepend_lexical_block (id->block,
465038fd1498Szrj 				 remap_blocks (DECL_INITIAL (fn), id));
465138fd1498Szrj 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
465238fd1498Szrj 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
465338fd1498Szrj 				   == NULL_TREE));
465438fd1498Szrj 	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
465538fd1498Szrj 	     otherwise for DWARF DW_TAG_formal_parameter will not be children of
465638fd1498Szrj 	     DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
465738fd1498Szrj 	     under it.  The parameters can be then evaluated in the debugger,
465838fd1498Szrj 	     but don't show in backtraces.  */
465938fd1498Szrj 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
466038fd1498Szrj 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
466138fd1498Szrj 	      {
466238fd1498Szrj 		tree v = *var;
466338fd1498Szrj 		*var = TREE_CHAIN (v);
466438fd1498Szrj 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
466538fd1498Szrj 		BLOCK_VARS (id->block) = v;
466638fd1498Szrj 	      }
466738fd1498Szrj 	    else
466838fd1498Szrj 	      var = &TREE_CHAIN (*var);
466938fd1498Szrj 	}
467038fd1498Szrj       else
467138fd1498Szrj 	remap_blocks_to_null (DECL_INITIAL (fn), id);
467238fd1498Szrj     }
467338fd1498Szrj 
467438fd1498Szrj   /* Return statements in the function body will be replaced by jumps
467538fd1498Szrj      to the RET_LABEL.  */
467638fd1498Szrj   gcc_assert (DECL_INITIAL (fn));
467738fd1498Szrj   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
467838fd1498Szrj 
467938fd1498Szrj   /* Find the LHS to which the result of this call is assigned.  */
468038fd1498Szrj   return_slot = NULL;
468138fd1498Szrj   if (gimple_call_lhs (stmt))
468238fd1498Szrj     {
468338fd1498Szrj       modify_dest = gimple_call_lhs (stmt);
468438fd1498Szrj 
468538fd1498Szrj       /* Remember where to copy returned bounds.  */
468638fd1498Szrj       if (gimple_call_with_bounds_p (stmt)
468738fd1498Szrj 	  && TREE_CODE (modify_dest) == SSA_NAME)
468838fd1498Szrj 	{
468938fd1498Szrj 	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
469038fd1498Szrj 	  if (retbnd)
469138fd1498Szrj 	    {
469238fd1498Szrj 	      return_bounds = gimple_call_lhs (retbnd);
469338fd1498Szrj 	      /* If returned bounds are not used then just
469438fd1498Szrj 		 remove unused call.  */
469538fd1498Szrj 	      if (!return_bounds)
469638fd1498Szrj 		{
469738fd1498Szrj 		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
469838fd1498Szrj 		  gsi_remove (&iter, true);
469938fd1498Szrj 		}
470038fd1498Szrj 	    }
470138fd1498Szrj 	}
470238fd1498Szrj 
470338fd1498Szrj       /* The function which we are inlining might not return a value,
470438fd1498Szrj 	 in which case we should issue a warning that the function
470538fd1498Szrj 	 does not return a value.  In that case the optimizers will
470638fd1498Szrj 	 see that the variable to which the value is assigned was not
470738fd1498Szrj 	 initialized.  We do not want to issue a warning about that
470838fd1498Szrj 	 uninitialized variable.  */
470938fd1498Szrj       if (DECL_P (modify_dest))
471038fd1498Szrj 	TREE_NO_WARNING (modify_dest) = 1;
471138fd1498Szrj 
471238fd1498Szrj       if (gimple_call_return_slot_opt_p (call_stmt))
471338fd1498Szrj 	{
471438fd1498Szrj 	  return_slot = modify_dest;
471538fd1498Szrj 	  modify_dest = NULL;
471638fd1498Szrj 	}
471738fd1498Szrj     }
471838fd1498Szrj   else
471938fd1498Szrj     modify_dest = NULL;
472038fd1498Szrj 
472138fd1498Szrj   /* If we are inlining a call to the C++ operator new, we don't want
472238fd1498Szrj      to use type based alias analysis on the return value.  Otherwise
472338fd1498Szrj      we may get confused if the compiler sees that the inlined new
472438fd1498Szrj      function returns a pointer which was just deleted.  See bug
472538fd1498Szrj      33407.  */
472638fd1498Szrj   if (DECL_IS_OPERATOR_NEW (fn))
472738fd1498Szrj     {
472838fd1498Szrj       return_slot = NULL;
472938fd1498Szrj       modify_dest = NULL;
473038fd1498Szrj     }
473138fd1498Szrj 
473238fd1498Szrj   /* Declare the return variable for the function.  */
473338fd1498Szrj   use_retvar = declare_return_variable (id, return_slot, modify_dest,
473438fd1498Szrj 					return_bounds, bb);
473538fd1498Szrj 
473638fd1498Szrj   /* Add local vars in this inlined callee to caller.  */
473738fd1498Szrj   add_local_variables (id->src_cfun, cfun, id);
473838fd1498Szrj 
473938fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
474038fd1498Szrj     {
474138fd1498Szrj       fprintf (dump_file, "Inlining %s to %s with frequency %4.2f\n",
474238fd1498Szrj 	       id->src_node->dump_name (),
474338fd1498Szrj 	       id->dst_node->dump_name (),
474438fd1498Szrj 	       cg_edge->sreal_frequency ().to_double ());
474538fd1498Szrj       id->src_node->dump (dump_file);
474638fd1498Szrj       id->dst_node->dump (dump_file);
474738fd1498Szrj     }
474838fd1498Szrj 
474938fd1498Szrj   /* This is it.  Duplicate the callee body.  Assume callee is
475038fd1498Szrj      pre-gimplified.  Note that we must not alter the caller
475138fd1498Szrj      function in any way before this point, as this CALL_EXPR may be
475238fd1498Szrj      a self-referential call; if we're calling ourselves, we need to
475338fd1498Szrj      duplicate our body before altering anything.  */
475438fd1498Szrj   copy_body (id, bb, return_block, NULL);
475538fd1498Szrj 
475638fd1498Szrj   reset_debug_bindings (id, stmt_gsi);
475738fd1498Szrj 
475838fd1498Szrj   if (flag_stack_reuse != SR_NONE)
475938fd1498Szrj     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
476038fd1498Szrj       if (!TREE_THIS_VOLATILE (p))
476138fd1498Szrj 	{
476238fd1498Szrj 	  tree *varp = id->decl_map->get (p);
476338fd1498Szrj 	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
476438fd1498Szrj 	    {
476538fd1498Szrj 	      tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
476638fd1498Szrj 	      gimple *clobber_stmt;
476738fd1498Szrj 	      TREE_THIS_VOLATILE (clobber) = 1;
476838fd1498Szrj 	      clobber_stmt = gimple_build_assign (*varp, clobber);
476938fd1498Szrj 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
477038fd1498Szrj 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
477138fd1498Szrj 	    }
477238fd1498Szrj 	}
477338fd1498Szrj 
477438fd1498Szrj   /* Reset the escaped solution.  */
477538fd1498Szrj   if (cfun->gimple_df)
477638fd1498Szrj     pt_solution_reset (&cfun->gimple_df->escaped);
477738fd1498Szrj 
477838fd1498Szrj   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
477938fd1498Szrj   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
478038fd1498Szrj     {
478138fd1498Szrj       size_t nargs = gimple_call_num_args (simtenter_stmt);
478238fd1498Szrj       vec<tree> *vars = id->dst_simt_vars;
478338fd1498Szrj       auto_vec<tree> newargs (nargs + vars->length ());
478438fd1498Szrj       for (size_t i = 0; i < nargs; i++)
478538fd1498Szrj 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
478638fd1498Szrj       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
478738fd1498Szrj 	{
478838fd1498Szrj 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
478938fd1498Szrj 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
479038fd1498Szrj 	}
479138fd1498Szrj       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
479238fd1498Szrj       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
479338fd1498Szrj       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
479438fd1498Szrj       gsi_replace (&gsi, g, false);
479538fd1498Szrj     }
479638fd1498Szrj   vec_free (id->dst_simt_vars);
479738fd1498Szrj   id->dst_simt_vars = simtvars_save;
479838fd1498Szrj 
479938fd1498Szrj   /* Clean up.  */
480038fd1498Szrj   if (id->debug_map)
480138fd1498Szrj     {
480238fd1498Szrj       delete id->debug_map;
480338fd1498Szrj       id->debug_map = dst;
480438fd1498Szrj     }
480538fd1498Szrj   delete id->decl_map;
480638fd1498Szrj   id->decl_map = st;
480738fd1498Szrj 
480838fd1498Szrj   /* Unlink the calls virtual operands before replacing it.  */
480938fd1498Szrj   unlink_stmt_vdef (stmt);
481038fd1498Szrj   if (gimple_vdef (stmt)
481138fd1498Szrj       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
481238fd1498Szrj     release_ssa_name (gimple_vdef (stmt));
481338fd1498Szrj 
481438fd1498Szrj   /* If the inlined function returns a result that we care about,
481538fd1498Szrj      substitute the GIMPLE_CALL with an assignment of the return
481638fd1498Szrj      variable to the LHS of the call.  That is, if STMT was
481738fd1498Szrj      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
481838fd1498Szrj   if (use_retvar && gimple_call_lhs (stmt))
481938fd1498Szrj     {
482038fd1498Szrj       gimple *old_stmt = stmt;
482138fd1498Szrj       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
482238fd1498Szrj       gsi_replace (&stmt_gsi, stmt, false);
482338fd1498Szrj       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
482438fd1498Szrj       /* Append a clobber for id->retvar if easily possible.  */
482538fd1498Szrj       if (flag_stack_reuse != SR_NONE
482638fd1498Szrj 	  && id->retvar
482738fd1498Szrj 	  && VAR_P (id->retvar)
482838fd1498Szrj 	  && id->retvar != return_slot
482938fd1498Szrj 	  && id->retvar != modify_dest
483038fd1498Szrj 	  && !TREE_THIS_VOLATILE (id->retvar)
483138fd1498Szrj 	  && !is_gimple_reg (id->retvar)
483238fd1498Szrj 	  && !stmt_ends_bb_p (stmt))
483338fd1498Szrj 	{
483438fd1498Szrj 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
483538fd1498Szrj 	  gimple *clobber_stmt;
483638fd1498Szrj 	  TREE_THIS_VOLATILE (clobber) = 1;
483738fd1498Szrj 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
483838fd1498Szrj 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
483938fd1498Szrj 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
484038fd1498Szrj 	}
484138fd1498Szrj 
484238fd1498Szrj       /* Copy bounds if we copy structure with bounds.  */
484338fd1498Szrj       if (chkp_function_instrumented_p (id->dst_fn)
484438fd1498Szrj 	  && !BOUNDED_P (use_retvar)
484538fd1498Szrj 	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
484638fd1498Szrj 	id->assign_stmts.safe_push (stmt);
484738fd1498Szrj     }
484838fd1498Szrj   else
484938fd1498Szrj     {
485038fd1498Szrj       /* Handle the case of inlining a function with no return
485138fd1498Szrj 	 statement, which causes the return value to become undefined.  */
485238fd1498Szrj       if (gimple_call_lhs (stmt)
485338fd1498Szrj 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
485438fd1498Szrj 	{
485538fd1498Szrj 	  tree name = gimple_call_lhs (stmt);
485638fd1498Szrj 	  tree var = SSA_NAME_VAR (name);
485738fd1498Szrj 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
485838fd1498Szrj 
485938fd1498Szrj 	  if (def)
486038fd1498Szrj 	    {
486138fd1498Szrj 	      /* If the variable is used undefined, make this name
486238fd1498Szrj 		 undefined via a move.  */
486338fd1498Szrj 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
486438fd1498Szrj 	      gsi_replace (&stmt_gsi, stmt, true);
486538fd1498Szrj 	    }
486638fd1498Szrj 	  else
486738fd1498Szrj 	    {
486838fd1498Szrj 	      if (!var)
486938fd1498Szrj 		{
487038fd1498Szrj 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
487138fd1498Szrj 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
487238fd1498Szrj 		}
487338fd1498Szrj 	      /* Otherwise make this variable undefined.  */
487438fd1498Szrj 	      gsi_remove (&stmt_gsi, true);
487538fd1498Szrj 	      set_ssa_default_def (cfun, var, name);
487638fd1498Szrj 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
487738fd1498Szrj 	    }
487838fd1498Szrj 	}
487938fd1498Szrj       /* Replace with a clobber for id->retvar.  */
488038fd1498Szrj       else if (flag_stack_reuse != SR_NONE
488138fd1498Szrj 	       && id->retvar
488238fd1498Szrj 	       && VAR_P (id->retvar)
488338fd1498Szrj 	       && id->retvar != return_slot
488438fd1498Szrj 	       && id->retvar != modify_dest
488538fd1498Szrj 	       && !TREE_THIS_VOLATILE (id->retvar)
488638fd1498Szrj 	       && !is_gimple_reg (id->retvar))
488738fd1498Szrj 	{
488838fd1498Szrj 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
488938fd1498Szrj 	  gimple *clobber_stmt;
489038fd1498Szrj 	  TREE_THIS_VOLATILE (clobber) = 1;
489138fd1498Szrj 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
489238fd1498Szrj 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
489338fd1498Szrj 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
489438fd1498Szrj 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
489538fd1498Szrj 	}
489638fd1498Szrj       else
489738fd1498Szrj 	gsi_remove (&stmt_gsi, true);
489838fd1498Szrj     }
489938fd1498Szrj 
490038fd1498Szrj   /* Put returned bounds into the correct place if required.  */
490138fd1498Szrj   if (return_bounds)
490238fd1498Szrj     {
490338fd1498Szrj       gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
490438fd1498Szrj       gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
490538fd1498Szrj       gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
490638fd1498Szrj       unlink_stmt_vdef (old_stmt);
490738fd1498Szrj       gsi_replace (&bnd_gsi, new_stmt, false);
490838fd1498Szrj       maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
490938fd1498Szrj       cgraph_update_edges_for_call_stmt (old_stmt,
491038fd1498Szrj 					 gimple_call_fndecl (old_stmt),
491138fd1498Szrj 					 new_stmt);
491238fd1498Szrj     }
491338fd1498Szrj 
491438fd1498Szrj   if (purge_dead_abnormal_edges)
491538fd1498Szrj     {
491638fd1498Szrj       gimple_purge_dead_eh_edges (return_block);
491738fd1498Szrj       gimple_purge_dead_abnormal_call_edges (return_block);
491838fd1498Szrj     }
491938fd1498Szrj 
492038fd1498Szrj   /* If the value of the new expression is ignored, that's OK.  We
492138fd1498Szrj      don't warn about this for CALL_EXPRs, so we shouldn't warn about
492238fd1498Szrj      the equivalent inlined version either.  */
492338fd1498Szrj   if (is_gimple_assign (stmt))
492438fd1498Szrj     {
492538fd1498Szrj       gcc_assert (gimple_assign_single_p (stmt)
492638fd1498Szrj 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
492738fd1498Szrj       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
492838fd1498Szrj     }
492938fd1498Szrj 
493038fd1498Szrj   /* Copy bounds for all generated assigns that need it.  */
493138fd1498Szrj   for (i = 0; i < id->assign_stmts.length (); i++)
493238fd1498Szrj     chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
493338fd1498Szrj   id->assign_stmts.release ();
493438fd1498Szrj 
493538fd1498Szrj   /* Output the inlining info for this abstract function, since it has been
493638fd1498Szrj      inlined.  If we don't do this now, we can lose the information about the
493738fd1498Szrj      variables in the function when the blocks get blown away as soon as we
493838fd1498Szrj      remove the cgraph node.  */
493938fd1498Szrj   if (gimple_block (stmt))
494038fd1498Szrj     (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
494138fd1498Szrj 
494238fd1498Szrj   /* Update callgraph if needed.  */
494338fd1498Szrj   cg_edge->callee->remove ();
494438fd1498Szrj 
494538fd1498Szrj   id->block = NULL_TREE;
494638fd1498Szrj   id->retvar = NULL_TREE;
494738fd1498Szrj   id->retbnd = NULL_TREE;
494838fd1498Szrj   successfully_inlined = true;
494938fd1498Szrj 
495038fd1498Szrj  egress:
495138fd1498Szrj   input_location = saved_location;
495238fd1498Szrj   return successfully_inlined;
495338fd1498Szrj }
495438fd1498Szrj 
495538fd1498Szrj /* Expand call statements reachable from STMT_P.
495638fd1498Szrj    We can only have CALL_EXPRs as the "toplevel" tree code or nested
495738fd1498Szrj    in a MODIFY_EXPR.  */
495838fd1498Szrj 
495938fd1498Szrj static bool
gimple_expand_calls_inline(basic_block bb,copy_body_data * id)496038fd1498Szrj gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
496138fd1498Szrj {
496238fd1498Szrj   gimple_stmt_iterator gsi;
496338fd1498Szrj   bool inlined = false;
496438fd1498Szrj 
496538fd1498Szrj   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
496638fd1498Szrj     {
496738fd1498Szrj       gimple *stmt = gsi_stmt (gsi);
496838fd1498Szrj       gsi_prev (&gsi);
496938fd1498Szrj 
497038fd1498Szrj       if (is_gimple_call (stmt)
497138fd1498Szrj 	  && !gimple_call_internal_p (stmt))
497238fd1498Szrj 	inlined |= expand_call_inline (bb, stmt, id);
497338fd1498Szrj     }
497438fd1498Szrj 
497538fd1498Szrj   return inlined;
497638fd1498Szrj }
497738fd1498Szrj 
497838fd1498Szrj 
/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  FIRST is the basic-block count recorded
   before inlining, so only blocks added by the inliner are visited; the
   folds may update the callgraph and purge EH edges as a side effect.  */

static void
fold_marked_statements (int first, hash_set<gimple *> *statements)
{
  for (; first < n_basic_blocks_for_fn (cfun); first++)
    /* Block indices may have holes after CFG cleanup.  */
    if (BASIC_BLOCK_FOR_FN (cfun, first))
      {
        gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
	     !gsi_end_p (gsi);
	     gsi_next (&gsi))
	  if (statements->contains (gsi_stmt (gsi)))
	    {
	      gimple *old_stmt = gsi_stmt (gsi);
	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;

	      if (old_decl && DECL_BUILT_IN (old_decl))
		{
		  /* Folding builtins can create multiple instructions,
		     we need to look at all of them.  I2 remembers the
		     statement just before the fold point so the newly
		     emitted statements can be found afterwards.  */
		  gimple_stmt_iterator i2 = gsi;
		  gsi_prev (&i2);
		  if (fold_stmt (&gsi))
		    {
		      gimple *new_stmt;
		      /* If a builtin at the end of a bb folded into nothing,
			 the following loop won't work.  Update the callgraph
			 for the vanished call and move to the next block.  */
		      if (gsi_end_p (gsi))
			{
			  cgraph_update_edges_for_call_stmt (old_stmt,
							     old_decl, NULL);
			  break;
			}
		      /* Position I2 on the first statement produced by the
			 fold: either the block head (the folded statement
			 was first in the block) or the one after the saved
			 predecessor.  */
		      if (gsi_end_p (i2))
			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
		      else
			gsi_next (&i2);
		      /* Update every statement the fold emitted, up to and
			 including the one GSI now points at.  */
		      while (1)
			{
			  new_stmt = gsi_stmt (i2);
			  update_stmt (new_stmt);
			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
							     new_stmt);

			  if (new_stmt == gsi_stmt (gsi))
			    {
			      /* It is okay to check only for the very last
				 of these statements.  If it is a throwing
				 statement nothing will change.  If it isn't
				 this can remove EH edges.  If that weren't
				 correct then because some intermediate stmts
				 throw, but not the last one.  That would mean
				 we'd have to split the block, which we can't
				 here and we'd lose anyway.  And as builtins
				 probably never throw, this all
				 is moot anyway.  */
			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
								  new_stmt))
				gimple_purge_dead_eh_edges (
				  BASIC_BLOCK_FOR_FN (cfun, first));
			      break;
			    }
			  gsi_next (&i2);
			}
		    }
		}
	      else if (fold_stmt (&gsi))
		{
		  /* Re-read the statement from GSI as fold_stmt() may
		     have changed it.  */
		  gimple *new_stmt = gsi_stmt (gsi);
		  update_stmt (new_stmt);

		  /* A call may have been folded away or introduced;
		     keep the callgraph edges in sync.  */
		  if (is_gimple_call (old_stmt)
		      || is_gimple_call (new_stmt))
		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
						       new_stmt);

		  /* If the folded statement can no longer throw, its
		     EH edges are dead.  */
		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
								    first));
		}
	    }
      }
}
506738fd1498Szrj 
/* Expand calls to inline functions in the body of FN.  Returns a mask of
   TODO_* flags for the pass manager describing the cleanups required
   after inlining (or 0 if nothing was inlined).  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  basic_block bb;
  /* Remember the block count before inlining so fold_marked_statements
     only has to scan blocks created by the inliner.  */
  int last = n_basic_blocks_for_fn (cfun);
  bool inlined_p = false;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node::get (fn);
  gcc_assert (id.dst_node->definition);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  if (current_function_decl)
    id.dst_fn = current_function_decl;

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;
  /* Statements touched by inlining are queued here and folded at the
     end, once the CFG has settled.  */
  id.statements_to_fold = new hash_set<gimple *>;

  push_gimplify_context ();

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB_FN (bb, cfun)
    inlined_p |= gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);

  if (flag_checking)
    {
      struct cgraph_edge *e;

      id.dst_node->verify ();

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
	gcc_assert (e->inline_failed);
    }

  /* Fold queued statements.  */
  update_max_bb_count ();
  fold_marked_statements (last, id.statements_to_fold);
  delete id.statements_to_fold;

  /* All debug statements queued during inlining should have been
     consumed by now.  */
  gcc_assert (!id.debug_stmts.exists ());

  /* If we didn't inline into the function there is nothing to do.  */
  if (!inlined_p)
    return 0;

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  delete_unreachable_blocks_update_callgraph (&id);
  if (flag_checking)
    id.dst_node->verify ();

  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet - the IPA passes might make various functions to not
     throw and they don't care to proactively update local EH info.  This is
     done later in fixup_cfg pass that also execute the verification.  */
  return (TODO_update_ssa
	  | TODO_cleanup_cfg
	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
	     ? TODO_rebuild_frequencies : 0));
}
515638fd1498Szrj 
515738fd1498Szrj /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
515838fd1498Szrj 
515938fd1498Szrj tree
copy_tree_r(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)516038fd1498Szrj copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
516138fd1498Szrj {
516238fd1498Szrj   enum tree_code code = TREE_CODE (*tp);
516338fd1498Szrj   enum tree_code_class cl = TREE_CODE_CLASS (code);
516438fd1498Szrj 
516538fd1498Szrj   /* We make copies of most nodes.  */
516638fd1498Szrj   if (IS_EXPR_CODE_CLASS (cl)
516738fd1498Szrj       || code == TREE_LIST
516838fd1498Szrj       || code == TREE_VEC
516938fd1498Szrj       || code == TYPE_DECL
517038fd1498Szrj       || code == OMP_CLAUSE)
517138fd1498Szrj     {
517238fd1498Szrj       /* Because the chain gets clobbered when we make a copy, we save it
517338fd1498Szrj 	 here.  */
517438fd1498Szrj       tree chain = NULL_TREE, new_tree;
517538fd1498Szrj 
517638fd1498Szrj       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
517738fd1498Szrj 	chain = TREE_CHAIN (*tp);
517838fd1498Szrj 
517938fd1498Szrj       /* Copy the node.  */
518038fd1498Szrj       new_tree = copy_node (*tp);
518138fd1498Szrj 
518238fd1498Szrj       *tp = new_tree;
518338fd1498Szrj 
518438fd1498Szrj       /* Now, restore the chain, if appropriate.  That will cause
518538fd1498Szrj 	 walk_tree to walk into the chain as well.  */
518638fd1498Szrj       if (code == PARM_DECL
518738fd1498Szrj 	  || code == TREE_LIST
518838fd1498Szrj 	  || code == OMP_CLAUSE)
518938fd1498Szrj 	TREE_CHAIN (*tp) = chain;
519038fd1498Szrj 
519138fd1498Szrj       /* For now, we don't update BLOCKs when we make copies.  So, we
519238fd1498Szrj 	 have to nullify all BIND_EXPRs.  */
519338fd1498Szrj       if (TREE_CODE (*tp) == BIND_EXPR)
519438fd1498Szrj 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
519538fd1498Szrj     }
519638fd1498Szrj   else if (code == CONSTRUCTOR)
519738fd1498Szrj     {
519838fd1498Szrj       /* CONSTRUCTOR nodes need special handling because
519938fd1498Szrj          we need to duplicate the vector of elements.  */
520038fd1498Szrj       tree new_tree;
520138fd1498Szrj 
520238fd1498Szrj       new_tree = copy_node (*tp);
520338fd1498Szrj       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
520438fd1498Szrj       *tp = new_tree;
520538fd1498Szrj     }
520638fd1498Szrj   else if (code == STATEMENT_LIST)
520738fd1498Szrj     /* We used to just abort on STATEMENT_LIST, but we can run into them
520838fd1498Szrj        with statement-expressions (c++/40975).  */
520938fd1498Szrj     copy_statement_list (tp);
521038fd1498Szrj   else if (TREE_CODE_CLASS (code) == tcc_type)
521138fd1498Szrj     *walk_subtrees = 0;
521238fd1498Szrj   else if (TREE_CODE_CLASS (code) == tcc_declaration)
521338fd1498Szrj     *walk_subtrees = 0;
521438fd1498Szrj   else if (TREE_CODE_CLASS (code) == tcc_constant)
521538fd1498Szrj     *walk_subtrees = 0;
521638fd1498Szrj   return NULL_TREE;
521738fd1498Szrj }
521838fd1498Szrj 
521938fd1498Szrj /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
522038fd1498Szrj    information indicating to what new SAVE_EXPR this one should be mapped,
522138fd1498Szrj    use that one.  Otherwise, create a new node and enter it in ST.  FN is
522238fd1498Szrj    the function into which the copy will be placed.  */
522338fd1498Szrj 
522438fd1498Szrj static void
remap_save_expr(tree * tp,hash_map<tree,tree> * st,int * walk_subtrees)522538fd1498Szrj remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
522638fd1498Szrj {
522738fd1498Szrj   tree *n;
522838fd1498Szrj   tree t;
522938fd1498Szrj 
523038fd1498Szrj   /* See if we already encountered this SAVE_EXPR.  */
523138fd1498Szrj   n = st->get (*tp);
523238fd1498Szrj 
523338fd1498Szrj   /* If we didn't already remap this SAVE_EXPR, do so now.  */
523438fd1498Szrj   if (!n)
523538fd1498Szrj     {
523638fd1498Szrj       t = copy_node (*tp);
523738fd1498Szrj 
523838fd1498Szrj       /* Remember this SAVE_EXPR.  */
523938fd1498Szrj       st->put (*tp, t);
524038fd1498Szrj       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
524138fd1498Szrj       st->put (t, t);
524238fd1498Szrj     }
524338fd1498Szrj   else
524438fd1498Szrj     {
524538fd1498Szrj       /* We've already walked into this SAVE_EXPR; don't do it again.  */
524638fd1498Szrj       *walk_subtrees = 0;
524738fd1498Szrj       t = *n;
524838fd1498Szrj     }
524938fd1498Szrj 
525038fd1498Szrj   /* Replace this SAVE_EXPR with the copy.  */
525138fd1498Szrj   *tp = t;
525238fd1498Szrj }
525338fd1498Szrj 
525438fd1498Szrj /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
525538fd1498Szrj    label, copies the declaration and enters it in the splay_tree in DATA (which
525638fd1498Szrj    is really a 'copy_body_data *'.  */
525738fd1498Szrj 
525838fd1498Szrj static tree
mark_local_labels_stmt(gimple_stmt_iterator * gsip,bool * handled_ops_p ATTRIBUTE_UNUSED,struct walk_stmt_info * wi)525938fd1498Szrj mark_local_labels_stmt (gimple_stmt_iterator *gsip,
526038fd1498Szrj 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
526138fd1498Szrj 		        struct walk_stmt_info *wi)
526238fd1498Szrj {
526338fd1498Szrj   copy_body_data *id = (copy_body_data *) wi->info;
526438fd1498Szrj   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
526538fd1498Szrj 
526638fd1498Szrj   if (stmt)
526738fd1498Szrj     {
526838fd1498Szrj       tree decl = gimple_label_label (stmt);
526938fd1498Szrj 
527038fd1498Szrj       /* Copy the decl and remember the copy.  */
527138fd1498Szrj       insert_decl_map (id, decl, id->copy_decl (decl, id));
527238fd1498Szrj     }
527338fd1498Szrj 
527438fd1498Szrj   return NULL_TREE;
527538fd1498Szrj }
527638fd1498Szrj 
527738fd1498Szrj static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
527838fd1498Szrj 						  struct walk_stmt_info *wi);
527938fd1498Szrj 
528038fd1498Szrj /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
528138fd1498Szrj    Using the splay_tree pointed to by ST (which is really a `splay_tree'),
528238fd1498Szrj    remaps all local declarations to appropriate replacements in gimple
528338fd1498Szrj    operands. */
528438fd1498Szrj 
static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  /* Map from original decls/SSA names to their replacements.  */
  hash_map<tree, tree> *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi->is_lhs;
  wi->is_lhs = false;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      /* A remapped SSA name that is the LHS gets the current statement
	 as its defining statement.  */
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
    }
  /* Only a local declaration (variable or label).  */
  else if ((VAR_P (expr) && !TREE_STATIC (expr))
	   || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = st->get (expr);

      /* If it's there, remap it.  Decls not in the map are left alone.  */
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
	   || TREE_CODE (expr) == BIND_EXPR
	   || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
	{
	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
	  TREE_OPERAND (expr, 3) = NULL_TREE;
	}
    }
  else if (TREE_CODE (expr) == OMP_CLAUSE)
    {
      /* Before the omplower pass completes, some OMP clauses can contain
	 sequences that are neither copied by gimple_seq_copy nor walked by
	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
	 in those situations, we have to copy and process them explicitly.  */

      if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
	{
	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
	{
	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
	{
	  /* Reductions carry two embedded sequences: the initializer
	     and the merge operation.  Copy and remap both.  */
	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
536638fd1498Szrj 
536738fd1498Szrj 
536838fd1498Szrj /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
536938fd1498Szrj    Using the splay_tree pointed to by ST (which is really a `splay_tree'),
537038fd1498Szrj    remaps all local declarations to appropriate replacements in gimple
537138fd1498Szrj    statements. */
537238fd1498Szrj 
static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
		     bool *handled_ops_p ATTRIBUTE_UNUSED,
		     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple *gs = gsi_stmt (*gsip);

  if (gbind *stmt = dyn_cast <gbind *> (gs))
    {
      tree block = gimple_bind_block (stmt);

      /* Remap the bind's BLOCK, if any, and install the remapped one.  */
      if (block)
	{
	  remap_block (&block, id);
	  gimple_bind_set_block (stmt, block);
	}

      /* This will remap a lot of the same decls again, but this should be
	 harmless.  */
      if (gimple_bind_vars (stmt))
	{
	  tree old_var, decls = gimple_bind_vars (stmt);

	  /* Remap each bind variable, skipping those that may stay
	     non-local and those whose type is variably modified in the
	     source function.  */
	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
	    if (!can_be_nonlocal (old_var, id)
		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
	      remap_decl (old_var, id);

	  /* Rebuild the bind's variable list from the decl map, with
	     decl creation for types temporarily disabled.  */
	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
	  id->prevent_decl_creation_for_types = true;
	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
	  id->prevent_decl_creation_for_types = false;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
541238fd1498Szrj 
541338fd1498Szrj /* Create a copy of SEQ and remap all decls in it.  */
541438fd1498Szrj 
541538fd1498Szrj static gimple_seq
duplicate_remap_omp_clause_seq(gimple_seq seq,struct walk_stmt_info * wi)541638fd1498Szrj duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
541738fd1498Szrj {
541838fd1498Szrj   if (!seq)
541938fd1498Szrj     return NULL;
542038fd1498Szrj 
542138fd1498Szrj   /* If there are any labels in OMP sequences, they can be only referred to in
542238fd1498Szrj      the sequence itself and therefore we can do both here.  */
542338fd1498Szrj   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
542438fd1498Szrj   gimple_seq copy = gimple_seq_copy (seq);
542538fd1498Szrj   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
542638fd1498Szrj   return copy;
542738fd1498Szrj }
542838fd1498Szrj 
542938fd1498Szrj /* Copies everything in SEQ and replaces variables and labels local to
543038fd1498Szrj    current_function_decl.  */
543138fd1498Szrj 
gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  gimple_seq copy;

  /* There's nothing to do for NULL_TREE.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  Source and destination are the same function; we are
     only duplicating a statement sequence within it.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  hash_set<tree> visited;
  wi.info = &id;
  wi.pset = &visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  if (id.dependence_map)
    {
      delete id.dependence_map;
      id.dependence_map = NULL;
    }

  return copy;
}
548438fd1498Szrj 
548538fd1498Szrj 
548638fd1498Szrj /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
548738fd1498Szrj 
548838fd1498Szrj static tree
debug_find_tree_1(tree * tp,int * walk_subtrees ATTRIBUTE_UNUSED,void * data)548938fd1498Szrj debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
549038fd1498Szrj {
549138fd1498Szrj   if (*tp == data)
549238fd1498Szrj     return (tree) data;
549338fd1498Szrj   else
549438fd1498Szrj     return NULL;
549538fd1498Szrj }
549638fd1498Szrj 
549738fd1498Szrj DEBUG_FUNCTION bool
debug_find_tree(tree top,tree search)549838fd1498Szrj debug_find_tree (tree top, tree search)
549938fd1498Szrj {
550038fd1498Szrj   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
550138fd1498Szrj }
550238fd1498Szrj 
550338fd1498Szrj 
550438fd1498Szrj /* Declare the variables created by the inliner.  Add all the variables in
550538fd1498Szrj    VARS to BIND_EXPR.  */
550638fd1498Szrj 
550738fd1498Szrj static void
declare_inline_vars(tree block,tree vars)550838fd1498Szrj declare_inline_vars (tree block, tree vars)
550938fd1498Szrj {
551038fd1498Szrj   tree t;
551138fd1498Szrj   for (t = vars; t; t = DECL_CHAIN (t))
551238fd1498Szrj     {
551338fd1498Szrj       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
551438fd1498Szrj       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
551538fd1498Szrj       add_local_decl (cfun, t);
551638fd1498Szrj     }
551738fd1498Szrj 
551838fd1498Szrj   if (block)
551938fd1498Szrj     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
552038fd1498Szrj }
552138fd1498Szrj 
552238fd1498Szrj /* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
552338fd1498Szrj    but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
552438fd1498Szrj    VAR_DECL translation.  */
552538fd1498Szrj 
tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);
  /* For vector typed decls make sure to update DECL_MODE according
     to the new function context.  */
  if (VECTOR_TYPE_P (TREE_TYPE (copy)))
    SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    {
      /* Ordinary automatic local variables are now in the scope of the
	 new function.  */
      DECL_CONTEXT (copy) = id->dst_fn;
      /* Addressable variables copied into a function that collects SIMT
	 variables get the "omp simt private" attribute and are recorded
	 in id->dst_simt_vars.  */
      if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
	{
	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
	    DECL_ATTRIBUTES (copy)
	      = tree_cons (get_identifier ("omp simt private"), NULL,
			   DECL_ATTRIBUTES (copy));
	  id->dst_simt_vars->safe_push (copy);
	}
    }

  return copy;
}
557938fd1498Szrj 
558038fd1498Szrj static tree
copy_decl_to_var(tree decl,copy_body_data * id)558138fd1498Szrj copy_decl_to_var (tree decl, copy_body_data *id)
558238fd1498Szrj {
558338fd1498Szrj   tree copy, type;
558438fd1498Szrj 
558538fd1498Szrj   gcc_assert (TREE_CODE (decl) == PARM_DECL
558638fd1498Szrj 	      || TREE_CODE (decl) == RESULT_DECL);
558738fd1498Szrj 
558838fd1498Szrj   type = TREE_TYPE (decl);
558938fd1498Szrj 
559038fd1498Szrj   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
559138fd1498Szrj 		     VAR_DECL, DECL_NAME (decl), type);
559238fd1498Szrj   if (DECL_PT_UID_SET_P (decl))
559338fd1498Szrj     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
559438fd1498Szrj   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
559538fd1498Szrj   TREE_READONLY (copy) = TREE_READONLY (decl);
559638fd1498Szrj   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
559738fd1498Szrj   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
559838fd1498Szrj 
559938fd1498Szrj   return copy_decl_for_dup_finish (id, decl, copy);
560038fd1498Szrj }
560138fd1498Szrj 
560238fd1498Szrj /* Like copy_decl_to_var, but create a return slot object instead of a
560338fd1498Szrj    pointer variable for return by invisible reference.  */
560438fd1498Szrj 
560538fd1498Szrj static tree
copy_result_decl_to_var(tree decl,copy_body_data * id)560638fd1498Szrj copy_result_decl_to_var (tree decl, copy_body_data *id)
560738fd1498Szrj {
560838fd1498Szrj   tree copy, type;
560938fd1498Szrj 
561038fd1498Szrj   gcc_assert (TREE_CODE (decl) == PARM_DECL
561138fd1498Szrj 	      || TREE_CODE (decl) == RESULT_DECL);
561238fd1498Szrj 
561338fd1498Szrj   type = TREE_TYPE (decl);
561438fd1498Szrj   if (DECL_BY_REFERENCE (decl))
561538fd1498Szrj     type = TREE_TYPE (type);
561638fd1498Szrj 
561738fd1498Szrj   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
561838fd1498Szrj 		     VAR_DECL, DECL_NAME (decl), type);
561938fd1498Szrj   if (DECL_PT_UID_SET_P (decl))
562038fd1498Szrj     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
562138fd1498Szrj   TREE_READONLY (copy) = TREE_READONLY (decl);
562238fd1498Szrj   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
562338fd1498Szrj   if (!DECL_BY_REFERENCE (decl))
562438fd1498Szrj     {
562538fd1498Szrj       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
562638fd1498Szrj       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
562738fd1498Szrj     }
562838fd1498Szrj 
562938fd1498Szrj   return copy_decl_for_dup_finish (id, decl, copy);
563038fd1498Szrj }
563138fd1498Szrj 
563238fd1498Szrj tree
copy_decl_no_change(tree decl,copy_body_data * id)563338fd1498Szrj copy_decl_no_change (tree decl, copy_body_data *id)
563438fd1498Szrj {
563538fd1498Szrj   tree copy;
563638fd1498Szrj 
563738fd1498Szrj   copy = copy_node (decl);
563838fd1498Szrj 
563938fd1498Szrj   /* The COPY is not abstract; it will be generated in DST_FN.  */
564038fd1498Szrj   DECL_ABSTRACT_P (copy) = false;
564138fd1498Szrj   lang_hooks.dup_lang_specific_decl (copy);
564238fd1498Szrj 
564338fd1498Szrj   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
564438fd1498Szrj      been taken; it's for internal bookkeeping in expand_goto_internal.  */
564538fd1498Szrj   if (TREE_CODE (copy) == LABEL_DECL)
564638fd1498Szrj     {
564738fd1498Szrj       TREE_ADDRESSABLE (copy) = 0;
564838fd1498Szrj       LABEL_DECL_UID (copy) = -1;
564938fd1498Szrj     }
565038fd1498Szrj 
565138fd1498Szrj   return copy_decl_for_dup_finish (id, decl, copy);
565238fd1498Szrj }
565338fd1498Szrj 
565438fd1498Szrj static tree
copy_decl_maybe_to_var(tree decl,copy_body_data * id)565538fd1498Szrj copy_decl_maybe_to_var (tree decl, copy_body_data *id)
565638fd1498Szrj {
565738fd1498Szrj   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
565838fd1498Szrj     return copy_decl_to_var (decl, id);
565938fd1498Szrj   else
566038fd1498Szrj     return copy_decl_no_change (decl, id);
566138fd1498Szrj }
566238fd1498Szrj 
566338fd1498Szrj /* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
			       bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  /* PARG points at the DECL_CHAIN slot where the next kept parameter
     will be linked in.  */
  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
	/* Kept parameter: remap it, forcing a fresh PARM_DECL copy if
	   remapping produced something else.  */
        tree new_tree = remap_decl (arg, id);
	if (TREE_CODE (new_tree) != PARM_DECL)
	  new_tree = id->copy_decl (arg, id);
        lang_hooks.dup_lang_specific_decl (new_tree);
        *parg = new_tree;
	parg = &DECL_CHAIN (new_tree);
      }
    else if (!id->decl_map->get (arg))
      {
	/* Make an equivalent VAR_DECL.  If the argument was used
	   as temporary variable later in function, the uses will be
	   replaced by local variable.  */
	tree var = copy_decl_to_var (arg, id);
	insert_decl_map (id, arg, var);
        /* Declare this new variable.  */
        DECL_CHAIN (var) = *vars;
        *vars = var;
      }
  return new_parm;
}
569738fd1498Szrj 
569838fd1498Szrj /* Return a copy of the function's static chain.  */
569938fd1498Szrj static tree
copy_static_chain(tree static_chain,copy_body_data * id)570038fd1498Szrj copy_static_chain (tree static_chain, copy_body_data * id)
570138fd1498Szrj {
570238fd1498Szrj   tree *chain_copy, *pvar;
570338fd1498Szrj 
570438fd1498Szrj   chain_copy = &static_chain;
570538fd1498Szrj   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
570638fd1498Szrj     {
570738fd1498Szrj       tree new_tree = remap_decl (*pvar, id);
570838fd1498Szrj       lang_hooks.dup_lang_specific_decl (new_tree);
570938fd1498Szrj       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
571038fd1498Szrj       *pvar = new_tree;
571138fd1498Szrj     }
571238fd1498Szrj   return static_chain;
571338fd1498Szrj }
571438fd1498Szrj 
571538fd1498Szrj /* Return true if the function is allowed to be versioned.
571638fd1498Szrj    This is a guard for the versioning functionality.  */
571738fd1498Szrj 
571838fd1498Szrj bool
tree_versionable_function_p(tree fndecl)571938fd1498Szrj tree_versionable_function_p (tree fndecl)
572038fd1498Szrj {
572138fd1498Szrj   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
572238fd1498Szrj 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
572338fd1498Szrj }
572438fd1498Szrj 
572538fd1498Szrj /* Delete all unreachable basic blocks and update callgraph.
572638fd1498Szrj    Doing so is somewhat nontrivial because we need to update all clones and
572738fd1498Szrj    remove inline function that become unreachable.  */
572838fd1498Szrj 
static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  /* Compute BB_REACHABLE flags for the whole CFG first.  */
  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
       != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
    {
      /* Record the successor first; B may be deleted below.  */
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
	{
          gimple_stmt_iterator bsi;

          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
	    {
	      struct cgraph_edge *e;
	      struct cgraph_node *node;

	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));

	      /* Drop the callgraph edge for a call statement; for a
		 still-inlined callee remove the callee together with
		 its inline clones instead.  */
	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
		  &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
		{
		  if (!e->inline_failed)
		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
		  else
		    e->remove ();
		}
	      /* When clones move with us, do the same cleanup in every
		 clone of DST_NODE.  */
	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
		  && id->dst_node->clones)
		for (node = id->dst_node->clones; node != id->dst_node;)
		  {
		    node->remove_stmt_references (gsi_stmt (bsi));
		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
		      {
			if (!e->inline_failed)
			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
			else
			  e->remove ();
		      }

		    /* Advance through the clone tree: children first,
		       then siblings, then climb back up to an ancestor
		       with an unvisited sibling.  */
		    if (node->clones)
		      node = node->clones;
		    else if (node->next_sibling_clone)
		      node = node->next_sibling_clone;
		    else
		      {
			while (node != id->dst_node && !node->next_sibling_clone)
			  node = node->clone_of;
			if (node != id->dst_node)
			  node = node->next_sibling_clone;
		      }
		  }
	    }
	  delete_basic_block (b);
	  changed = true;
	}
    }

  return changed;
}
579738fd1498Szrj 
579838fd1498Szrj /* Update clone info after duplication.  */
579938fd1498Szrj 
static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  /* Visit every clone of DST_NODE (pre-order walk of the clone tree).  */
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
	  unsigned int i;
          for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
	    {
	      struct ipa_replace_map *replace_info;
	      replace_info = (*node->clone.tree_map)[i];
	      /* Remap both sides of the replacement through the same
		 mapping used for the copied body.  */
	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
	    }
	}
      /* Advance through the clone tree: children first, then siblings,
	 then climb back up to an ancestor with an unvisited sibling.  */
      if (node->clones)
	node = node->clones;
      else if (node->next_sibling_clone)
	node = node->next_sibling_clone;
      else
	{
	  while (node != id->dst_node && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != id->dst_node)
	    node = node->next_sibling_clone;
	}
    }
}
583338fd1498Szrj 
583438fd1498Szrj /* Create a copy of a function's tree.
583538fd1498Szrj    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
583638fd1498Szrj    of the original function and the new copied function
583738fd1498Szrj    respectively.  In case we want to replace a DECL
583838fd1498Szrj    tree with another tree while duplicating the function's
583938fd1498Szrj    body, TREE_MAP represents the mapping between these
584038fd1498Szrj    trees. If UPDATE_CLONES is set, the call_stmt fields
584138fd1498Szrj    of edges of clones of the function will be updated.
584238fd1498Szrj 
584338fd1498Szrj    If non-NULL ARGS_TO_SKIP determine function parameters to remove
584438fd1498Szrj    from new version.
584538fd1498Szrj    If SKIP_RETURN is true, the new version will return void.
584638fd1498Szrj    If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
584738fd1498Szrj    If non_NULL NEW_ENTRY determine new entry BB of the clone.
584838fd1498Szrj */
void
tree_function_versioning (tree old_decl, tree new_decl,
			  vec<ipa_replace_map *, va_gc> *tree_map,
			  bool update_clones, bitmap args_to_skip,
			  bool skip_return, bitmap blocks_to_copy,
			  basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  /* Parameter-initialization statements that must be emitted at the
     entry of the new version (filled from the TREE_MAP loop below).  */
  auto_vec<gimple *, 10> init_stmts;
  tree vars = NULL_TREE;
  /* Starts as an alias of ARGS_TO_SKIP; a private copy is allocated the
     first time a bit must be cleared, so the caller's bitmap is never
     modified.  */
  bitmap debug_args_to_skip = args_to_skip;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
	      && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node::get (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_node::get (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      /* Clear the flag; NOTE(review): presumably re-set by
	 decl_debug_args_insert below when a vector actually exists.  */
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
	{
	  new_debug_args = decl_debug_args_insert (new_decl);
	  *new_debug_args = vec_safe_copy (*old_debug_args);
	}
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Statements recorded here are folded after the body is copied (see
     fold_marked_statements below).  */
  id.statements_to_fold = new hash_set<gimple *>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  /* When updating clones, their callgraph edges follow; otherwise only
     the edges of the old version are moved to the new one.  */
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
		   new_entry ? new_entry->count : old_entry_block->count);
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
      = copy_static_chain (p, &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
	gimple *init;
	replace_info = (*tree_map)[i];
	if (replace_info->replace_p)
	  {
	    int parm_num = -1;
	    if (!replace_info->old_tree)
	      {
		/* OLD_TREE was not supplied: locate the parameter by its
		   position PARM_NUM in the old declaration's argument
		   chain.  */
		int p = replace_info->parm_num;
		tree parm;
		tree req_type, new_type;

		for (parm = DECL_ARGUMENTS (old_decl); p;
		     parm = DECL_CHAIN (parm))
		  p--;
		replace_info->old_tree = parm;
		parm_num = replace_info->parm_num;
		req_type = TREE_TYPE (parm);
		new_type = TREE_TYPE (replace_info->new_tree);
		if (!useless_type_conversion_p (req_type, new_type))
		  {
		    /* Convert the replacement value to the parameter's
		       type; if no conversion is possible, drop this
		       replacement by clearing OLD_TREE again.  */
		    if (fold_convertible_p (req_type, replace_info->new_tree))
		      replace_info->new_tree
			= fold_build1 (NOP_EXPR, req_type,
				       replace_info->new_tree);
		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
		      replace_info->new_tree
			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
				       replace_info->new_tree);
		    else
		      {
			if (dump_file)
			  {
			    fprintf (dump_file, "    const ");
			    print_generic_expr (dump_file,
						replace_info->new_tree);
			    fprintf (dump_file,
				     "  can't be converted to param ");
			    print_generic_expr (dump_file, parm);
			    fprintf (dump_file, "\n");
			  }
			replace_info->old_tree = NULL;
		      }
		  }
	      }
	    else
	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
	    if (replace_info->old_tree)
	      {
		/* Record the parameter substitution and queue any
		   initialization statement it produced for insertion at
		   the clone's entry.  */
		init = setup_one_parameter (&id, replace_info->old_tree,
					    replace_info->new_tree, id.src_fn,
					    NULL,
					    &vars);
		if (init)
		  init_stmts.safe_push (init);
		if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
		  {
		    /* A replaced parameter has a known value, so it does
		       not need the "optimized away" debug-args treatment
		       below: clear its bit in a private copy of the
		       bitmap.  */
		    if (parm_num == -1)
		      {
			tree parm;
			int p;
			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
			     parm = DECL_CHAIN (parm), p++)
			  if (parm == replace_info->old_tree)
			    {
			      parm_num = p;
			      break;
			    }
		      }
		    if (parm_num != -1)
		      {
			if (debug_args_to_skip == args_to_skip)
			  {
			    debug_args_to_skip = BITMAP_ALLOC (NULL);
			    bitmap_copy (debug_args_to_skip, args_to_skip);
			  }
			bitmap_clear_bit (debug_args_to_skip, parm_num);
		      }
		  }
	      }
	  }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl)
      = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
				       args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      /* The return value is being dropped: give the new version a void
	 RESULT_DECL.  */
      DECL_RESULT (new_decl)
	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
		      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
	{
	  /* Map the old default-def SSA name of the by-reference result
	     to a fresh default def in the new function.  */
	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
	  insert_decl_map (&id, old_name, new_name);
	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
	}
    }

  /* Set up the destination functions loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the Function's body.  */
  copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
	     new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  update_max_bb_count ();
  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      /* Propagate the basic-block counts to the callgraph edges of both
	 direct and indirect calls.  */
      for (e = new_version_node->callees; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->count = bb->count;
	}
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->count = bb->count;
	}
    }

  if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
    {
      /* For each removed register parameter, push a (DECL_ORIGIN,
	 DEBUG_EXPR_DECL) pair onto the new decl's debug-args vector.  */
      tree parm;
      vec<tree, va_gc> **debug_args = NULL;
      unsigned int len = 0;
      for (parm = DECL_ARGUMENTS (old_decl), i = 0;
	   parm; parm = DECL_CHAIN (parm), i++)
	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
	  {
	    tree ddecl;

	    if (debug_args == NULL)
	      {
		debug_args = decl_debug_args_insert (new_decl);
		/* Remember the length before our additions so the loop
		   below only visits the pairs pushed here.  */
		len = vec_safe_length (*debug_args);
	      }
	    ddecl = make_node (DEBUG_EXPR_DECL);
	    DECL_ARTIFICIAL (ddecl) = 1;
	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
	    vec_safe_push (*debug_args, ddecl);
	  }
      if (debug_args != NULL)
	{
	  /* On the callee side, add
	     DEBUG D#Y s=> parm
	     DEBUG var => D#Y
	     stmts to the first bb where var is a VAR_DECL created for the
	     optimized away parameter in DECL_INITIAL block.  This hints
	     in the debug info that var (whole DECL_ORIGIN is the parm
	     PARM_DECL) is optimized away, but could be looked up at the
	     call site as value of D#X there.  */
	  tree var = vars, vexpr;
	  gimple_stmt_iterator cgsi
	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gimple *def_temp;
	  var = vars;
	  /* Walk the freshly pushed pairs backwards, stopping once index
	     I reaches LEN (the pre-existing vector length).  */
	  i = vec_safe_length (*debug_args);
	  do
	    {
	      i -= 2;
	      while (var != NULL_TREE
		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
		var = TREE_CHAIN (var);
	      if (var == NULL_TREE)
		break;
	      vexpr = make_node (DEBUG_EXPR_DECL);
	      parm = (**debug_args)[i];
	      DECL_ARTIFICIAL (vexpr) = 1;
	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	    }
	  while (i > len);
	}
    }

  /* Release the private bitmap copy, if one was made above.  */
  if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
    BITMAP_FREE (debug_args_to_skip);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}
621138fd1498Szrj 
621238fd1498Szrj /* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
621338fd1498Szrj    the callee and return the inlined body on success.  */
621438fd1498Szrj 
621538fd1498Szrj tree
maybe_inline_call_in_expr(tree exp)621638fd1498Szrj maybe_inline_call_in_expr (tree exp)
621738fd1498Szrj {
621838fd1498Szrj   tree fn = get_callee_fndecl (exp);
621938fd1498Szrj 
622038fd1498Szrj   /* We can only try to inline "const" functions.  */
622138fd1498Szrj   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
622238fd1498Szrj     {
622338fd1498Szrj       call_expr_arg_iterator iter;
622438fd1498Szrj       copy_body_data id;
622538fd1498Szrj       tree param, arg, t;
622638fd1498Szrj       hash_map<tree, tree> decl_map;
622738fd1498Szrj 
622838fd1498Szrj       /* Remap the parameters.  */
622938fd1498Szrj       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
623038fd1498Szrj 	   param;
623138fd1498Szrj 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
623238fd1498Szrj 	decl_map.put (param, arg);
623338fd1498Szrj 
623438fd1498Szrj       memset (&id, 0, sizeof (id));
623538fd1498Szrj       id.src_fn = fn;
623638fd1498Szrj       id.dst_fn = current_function_decl;
623738fd1498Szrj       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
623838fd1498Szrj       id.decl_map = &decl_map;
623938fd1498Szrj 
624038fd1498Szrj       id.copy_decl = copy_decl_no_change;
624138fd1498Szrj       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
624238fd1498Szrj       id.transform_new_cfg = false;
624338fd1498Szrj       id.transform_return_to_modify = true;
624438fd1498Szrj       id.transform_parameter = true;
624538fd1498Szrj       id.transform_lang_insert_block = NULL;
624638fd1498Szrj 
624738fd1498Szrj       /* Make sure not to unshare trees behind the front-end's back
624838fd1498Szrj 	 since front-end specific mechanisms may rely on sharing.  */
624938fd1498Szrj       id.regimplify = false;
625038fd1498Szrj       id.do_not_unshare = true;
625138fd1498Szrj 
625238fd1498Szrj       /* We're not inside any EH region.  */
625338fd1498Szrj       id.eh_lp_nr = 0;
625438fd1498Szrj 
625538fd1498Szrj       t = copy_tree_body (&id);
625638fd1498Szrj 
625738fd1498Szrj       /* We can only return something suitable for use in a GENERIC
625838fd1498Szrj 	 expression tree.  */
625938fd1498Szrj       if (TREE_CODE (t) == MODIFY_EXPR)
626038fd1498Szrj 	return TREE_OPERAND (t, 1);
626138fd1498Szrj     }
626238fd1498Szrj 
626338fd1498Szrj    return NULL_TREE;
626438fd1498Szrj }
626538fd1498Szrj 
626638fd1498Szrj /* Duplicate a type, fields and all.  */
626738fd1498Szrj 
626838fd1498Szrj tree
build_duplicate_type(tree type)626938fd1498Szrj build_duplicate_type (tree type)
627038fd1498Szrj {
627138fd1498Szrj   struct copy_body_data id;
627238fd1498Szrj 
627338fd1498Szrj   memset (&id, 0, sizeof (id));
627438fd1498Szrj   id.src_fn = current_function_decl;
627538fd1498Szrj   id.dst_fn = current_function_decl;
627638fd1498Szrj   id.src_cfun = cfun;
627738fd1498Szrj   id.decl_map = new hash_map<tree, tree>;
627838fd1498Szrj   id.debug_map = NULL;
627938fd1498Szrj   id.copy_decl = copy_decl_no_change;
628038fd1498Szrj 
628138fd1498Szrj   type = remap_type_1 (type, &id);
628238fd1498Szrj 
628338fd1498Szrj   delete id.decl_map;
628438fd1498Szrj   if (id.debug_map)
628538fd1498Szrj     delete id.debug_map;
628638fd1498Szrj 
628738fd1498Szrj   TYPE_CANONICAL (type) = type;
628838fd1498Szrj 
628938fd1498Szrj   return type;
629038fd1498Szrj }
629138fd1498Szrj 
629238fd1498Szrj /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
629338fd1498Szrj    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
629438fd1498Szrj    evaluation.  */
629538fd1498Szrj 
629638fd1498Szrj tree
copy_fn(tree fn,tree & parms,tree & result)629738fd1498Szrj copy_fn (tree fn, tree& parms, tree& result)
629838fd1498Szrj {
629938fd1498Szrj   copy_body_data id;
630038fd1498Szrj   tree param;
630138fd1498Szrj   hash_map<tree, tree> decl_map;
630238fd1498Szrj 
630338fd1498Szrj   tree *p = &parms;
630438fd1498Szrj   *p = NULL_TREE;
630538fd1498Szrj 
630638fd1498Szrj   memset (&id, 0, sizeof (id));
630738fd1498Szrj   id.src_fn = fn;
630838fd1498Szrj   id.dst_fn = current_function_decl;
630938fd1498Szrj   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
631038fd1498Szrj   id.decl_map = &decl_map;
631138fd1498Szrj 
631238fd1498Szrj   id.copy_decl = copy_decl_no_change;
631338fd1498Szrj   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
631438fd1498Szrj   id.transform_new_cfg = false;
631538fd1498Szrj   id.transform_return_to_modify = false;
631638fd1498Szrj   id.transform_parameter = true;
631738fd1498Szrj   id.transform_lang_insert_block = NULL;
631838fd1498Szrj 
631938fd1498Szrj   /* Make sure not to unshare trees behind the front-end's back
632038fd1498Szrj      since front-end specific mechanisms may rely on sharing.  */
632138fd1498Szrj   id.regimplify = false;
632238fd1498Szrj   id.do_not_unshare = true;
632338fd1498Szrj 
632438fd1498Szrj   /* We're not inside any EH region.  */
632538fd1498Szrj   id.eh_lp_nr = 0;
632638fd1498Szrj 
632738fd1498Szrj   /* Remap the parameters and result and return them to the caller.  */
632838fd1498Szrj   for (param = DECL_ARGUMENTS (fn);
632938fd1498Szrj        param;
633038fd1498Szrj        param = DECL_CHAIN (param))
633138fd1498Szrj     {
633238fd1498Szrj       *p = remap_decl (param, &id);
633338fd1498Szrj       p = &DECL_CHAIN (*p);
633438fd1498Szrj     }
633538fd1498Szrj 
633638fd1498Szrj   if (DECL_RESULT (fn))
633738fd1498Szrj     result = remap_decl (DECL_RESULT (fn), &id);
633838fd1498Szrj   else
633938fd1498Szrj     result = NULL_TREE;
634038fd1498Szrj 
634138fd1498Szrj   return copy_tree_body (&id);
634238fd1498Szrj }
6343