1 /* Tree inlining.
2    Copyright (C) 2001-2018 Free Software Foundation, Inc.
3    Contributed by Alexandre Oliva <aoliva@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "tree-chkp.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "sreal.h"
64 
65 /* I'm not really happy about this, but we need to handle gimple and
66    non-gimple trees.  */
67 
68 /* Inlining, Cloning, Versioning, Parallelization
69 
70    Inlining: a function body is duplicated, but the PARM_DECLs are
71    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72    MODIFY_EXPRs that store to a dedicated returned-value variable.
73    The duplicated eh_region info of the copy will later be appended
74    to the info for the caller; the eh_region info in copied throwing
75    statements and RESX statements is adjusted accordingly.
76 
77    Cloning: (only in C++) We have one body for a con/de/structor, and
78    multiple function decls, each with a unique parameter list.
79    Duplicate the body, using the given splay tree; some parameters
80    will become constants (like 0 or 1).
81 
82    Versioning: a function body is duplicated, and the copy becomes a
83    new function rather than being merged into the blocks of an existing
84    function as with inlining.  Some parameters will become constants.
85 
86    Parallelization: a region of a function is duplicated resulting in
87    a new function.  Variables may be replaced with complex expressions
88    to enable shared variable semantics.
89 
90    All of these will simultaneously look up any callgraph edges.  If
91    we're going to inline the duplicated function body, and the given
92    function has some cloned callgraph nodes (one for each place this
93    function will be inlined), those callgraph edges will be duplicated.
94    If we're cloning the body, those callgraph edges will be
95    updated to point into the new body.  (Note that the original
96    callgraph node and edge list will not be altered.)
97 
98    See the CALL_EXPR handling case in copy_tree_body_r ().  */
99 
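/* As a rough illustration of the inlining transformation described above
   (a sketch of the source-level effect only, not the exact GIMPLE/CFG
   representation; the names "y" and "retval" are made up):

       int inc (int x) { return x + 1; }
       ...
       r = inc (a);

   conceptually becomes, inside the caller,

       int y = a;            // PARM_DECL x remapped to a local VAR_DECL
       int retval = y + 1;   // RETURN_EXPR becomes a MODIFY_EXPR storing
                             // to the returned-value variable
       r = retval;           // the caller reads that variable

   while the duplicated body's eh_region info and callgraph edges are
   appended to and rewired into the caller as described above.  */
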
100 /* To Do:
101 
102    o In order to make inlining-on-trees work, we pessimized
103      function-local static constants.  In particular, they are now
104      always output, even when not addressed.  Fix this by treating
105      function-local static constants just like global static
106      constants; the back-end already knows not to output them if they
107      are not needed.
108 
109    o Provide heuristics to clamp inlining of recursive template
110      calls?  */
111 
112 
113 /* Weights that estimate_num_insns uses to estimate the size of the
114    produced code.  */
115 
116 eni_weights eni_size_weights;
117 
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119    to execute the produced code.  */
120 
121 eni_weights eni_time_weights;
122 
123 /* Prototypes.  */
124 
125 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
126 				     basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_decl_to_var (tree, copy_body_data *);
133 static tree copy_result_decl_to_var (tree, copy_body_data *);
134 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
137 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
138 
139 /* Insert a tree->tree mapping for ID.  Although the name suggests
140    that the trees should be variables, it is used for more than that.  */
141 
142 void
143 insert_decl_map (copy_body_data *id, tree key, tree value)
144 {
145   id->decl_map->put (key, value);
146 
147   /* Always insert an identity map as well.  If we see this same new
148      node again, we won't want to duplicate it a second time.  */
149   if (key != value)
150     id->decl_map->put (value, value);
151 }
152 
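/* For instance (illustrative only, made-up names): remapping PARM_DECL
   "x" to a new VAR_DECL "x.1" records both x -> x.1 and the identity
   x.1 -> x.1, so a later walk over already-remapped trees that reaches
   x.1 finds it in the map instead of creating yet another copy.  */
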
153 /* Insert a tree->tree mapping for ID.  This is only used for
154    variables.  */
155 
156 static void
157 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
158 {
159   if (!gimple_in_ssa_p (id->src_cfun))
160     return;
161 
162   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
163     return;
164 
165   if (!target_for_debug_bind (key))
166     return;
167 
168   gcc_assert (TREE_CODE (key) == PARM_DECL);
169   gcc_assert (VAR_P (value));
170 
171   if (!id->debug_map)
172     id->debug_map = new hash_map<tree, tree>;
173 
174   id->debug_map->put (key, value);
175 }
176 
177 /* If nonzero, we're remapping the contents of inlined debug
178    statements.  If negative, an error has occurred, such as a
179    reference to a variable that isn't available in the inlined
180    context.  */
181 static int processing_debug_stmt = 0;
182 
183 /* Construct new SSA name for old NAME. ID is the inline context.  */
184 
185 static tree
186 remap_ssa_name (tree name, copy_body_data *id)
187 {
188   tree new_tree, var;
189   tree *n;
190 
191   gcc_assert (TREE_CODE (name) == SSA_NAME);
192 
193   n = id->decl_map->get (name);
194   if (n)
195     return unshare_expr (*n);
196 
197   if (processing_debug_stmt)
198     {
199       if (SSA_NAME_IS_DEFAULT_DEF (name)
200 	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
201 	  && id->entry_bb == NULL
202 	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
203 	{
204 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
205 	  gimple *def_temp;
206 	  gimple_stmt_iterator gsi;
207 	  tree val = SSA_NAME_VAR (name);
208 
209 	  n = id->decl_map->get (val);
210 	  if (n != NULL)
211 	    val = *n;
212 	  if (TREE_CODE (val) != PARM_DECL)
213 	    {
214 	      processing_debug_stmt = -1;
215 	      return name;
216 	    }
217 	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
218 	  DECL_ARTIFICIAL (vexpr) = 1;
219 	  TREE_TYPE (vexpr) = TREE_TYPE (name);
220 	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
221 	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
222 	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
223 	  return vexpr;
224 	}
225 
226       processing_debug_stmt = -1;
227       return name;
228     }
229 
230   /* Remap anonymous SSA names or SSA names of anonymous decls.  */
231   var = SSA_NAME_VAR (name);
232   if (!var
233       || (!SSA_NAME_IS_DEFAULT_DEF (name)
234 	  && VAR_P (var)
235 	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
236 	  && DECL_ARTIFICIAL (var)
237 	  && DECL_IGNORED_P (var)
238 	  && !DECL_NAME (var)))
239     {
240       struct ptr_info_def *pi;
241       new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
242       if (!var && SSA_NAME_IDENTIFIER (name))
243 	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
244       insert_decl_map (id, name, new_tree);
245       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
246 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
247       /* At least IPA points-to info can be directly transferred.  */
248       if (id->src_cfun->gimple_df
249 	  && id->src_cfun->gimple_df->ipa_pta
250 	  && POINTER_TYPE_P (TREE_TYPE (name))
251 	  && (pi = SSA_NAME_PTR_INFO (name))
252 	  && !pi->pt.anything)
253 	{
254 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
255 	  new_pi->pt = pi->pt;
256 	}
257       return new_tree;
258     }
259 
260   /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
261      that in copy_bb.  */
262   new_tree = remap_decl (var, id);
263 
264   /* We might've substituted a constant or another SSA_NAME for
265      the variable.
266 
267      Replace the SSA name representing the RESULT_DECL by the variable
268      during inlining:  this saves us from the need to introduce a PHI node
269      in case the return value is only partly initialized.  */
270   if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
271       && (!SSA_NAME_VAR (name)
272 	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
273 	  || !id->transform_return_to_modify))
274     {
275       struct ptr_info_def *pi;
276       new_tree = make_ssa_name (new_tree);
277       insert_decl_map (id, name, new_tree);
278       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
279 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
280       /* At least IPA points-to info can be directly transferred.  */
281       if (id->src_cfun->gimple_df
282 	  && id->src_cfun->gimple_df->ipa_pta
283 	  && POINTER_TYPE_P (TREE_TYPE (name))
284 	  && (pi = SSA_NAME_PTR_INFO (name))
285 	  && !pi->pt.anything)
286 	{
287 	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
288 	  new_pi->pt = pi->pt;
289 	}
290       if (SSA_NAME_IS_DEFAULT_DEF (name))
291 	{
292 	  /* By inlining a function having an uninitialized variable, we might
293 	     extend its lifetime (the variable might get reused).  This causes
294 	     an ICE in case we end up extending the lifetime of an SSA name
295 	     across an abnormal edge, and it also increases register pressure.
296 
297 	     We simply initialize all uninitialized vars with 0, except for
298 	     the case where we are inlining into the very first BB.  We can
299 	     avoid this for all BBs that are not inside strongly connected
300 	     regions of the CFG, but this is expensive to test.  */
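	  /* For instance (hypothetical case): if the inlined body uses a
	     local "int tmp" that is never written and the conditions below
	     hold, the default definition of tmp's SSA name gets an explicit
	     "tmp_N = 0" assignment at the end of id->entry_bb rather than
	     being left undefined.  */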
301 	  if (id->entry_bb
302 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
303 	      && (!SSA_NAME_VAR (name)
304 		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
305 	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
306 					     0)->dest
307 		  || EDGE_COUNT (id->entry_bb->preds) != 1))
308 	    {
309 	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
310 	      gimple *init_stmt;
311 	      tree zero = build_zero_cst (TREE_TYPE (new_tree));
312 
313 	      init_stmt = gimple_build_assign (new_tree, zero);
314 	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
315 	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
316 	    }
317 	  else
318 	    {
319 	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
320 	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
321 	    }
322 	}
323     }
324   else
325     insert_decl_map (id, name, new_tree);
326   return new_tree;
327 }
328 
329 /* Remap DECL during the copying of the BLOCK tree for the function.  */
330 
331 tree
332 remap_decl (tree decl, copy_body_data *id)
333 {
334   tree *n;
335 
336   /* We only remap local variables in the current function.  */
337 
338   /* See if we have remapped this declaration.  */
339 
340   n = id->decl_map->get (decl);
341 
342   if (!n && processing_debug_stmt)
343     {
344       processing_debug_stmt = -1;
345       return decl;
346     }
347 
348   /* When remapping a type within copy_gimple_seq_and_replace_locals, all
349      necessary DECLs have already been remapped and we do not want to duplicate
350      a decl coming from outside of the sequence we are copying.  */
351   if (!n
352       && id->prevent_decl_creation_for_types
353       && id->remapping_type_depth > 0
354       && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
355     return decl;
356 
357   /* If we didn't already have an equivalent for this declaration, create one
358      now.  */
359   if (!n)
360     {
361       /* Make a copy of the variable or label.  */
362       tree t = id->copy_decl (decl, id);
363 
364       /* Remember it, so that if we encounter this local entity again
365 	 we can reuse this copy.  Do this early because remap_type may
366 	 need this decl for TYPE_STUB_DECL.  */
367       insert_decl_map (id, decl, t);
368 
369       if (!DECL_P (t))
370 	return t;
371 
372       /* Remap types, if necessary.  */
373       TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
374       if (TREE_CODE (t) == TYPE_DECL)
375 	{
376 	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
377 
378 	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
379 	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
380 	     is not set on the TYPE_DECL, for example in LTO mode.  */
381 	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
382 	    {
383 	      tree x = build_variant_type_copy (TREE_TYPE (t));
384 	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
385 	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
386 	      DECL_ORIGINAL_TYPE (t) = x;
387 	    }
388 	}
389 
390       /* Remap sizes as necessary.  */
391       walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
392       walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
393 
394       /* If fields, do likewise for offset and qualifier.  */
395       if (TREE_CODE (t) == FIELD_DECL)
396 	{
397 	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
398 	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
399 	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
400 	}
401 
402       return t;
403     }
404 
405   if (id->do_not_unshare)
406     return *n;
407   else
408     return unshare_expr (*n);
409 }
410 
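/* Typical flow (sketch): the first remap_decl call for a local DECL
   creates the copy via id->copy_decl and records it in id->decl_map;
   later calls for the same DECL return that recorded copy, unshared
   unless id->do_not_unshare is set.  */
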
411 static tree
412 remap_type_1 (tree type, copy_body_data *id)
413 {
414   tree new_tree, t;
415 
416   /* We do need a copy.  Build and register it now.  If this is a pointer or
417      reference type, remap the designated type and make a new pointer or
418      reference type.  */
419   if (TREE_CODE (type) == POINTER_TYPE)
420     {
421       new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
422 					 TYPE_MODE (type),
423 					 TYPE_REF_CAN_ALIAS_ALL (type));
424       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
425 	new_tree = build_type_attribute_qual_variant (new_tree,
426 						      TYPE_ATTRIBUTES (type),
427 						      TYPE_QUALS (type));
428       insert_decl_map (id, type, new_tree);
429       return new_tree;
430     }
431   else if (TREE_CODE (type) == REFERENCE_TYPE)
432     {
433       new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
434 					    TYPE_MODE (type),
435 					    TYPE_REF_CAN_ALIAS_ALL (type));
436       if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
437 	new_tree = build_type_attribute_qual_variant (new_tree,
438 						      TYPE_ATTRIBUTES (type),
439 						      TYPE_QUALS (type));
440       insert_decl_map (id, type, new_tree);
441       return new_tree;
442     }
443   else
444     new_tree = copy_node (type);
445 
446   insert_decl_map (id, type, new_tree);
447 
448   /* This is a new type, not a copy of an old type.  Need to reassociate
449      variants.  We can handle everything except the main variant lazily.  */
450   t = TYPE_MAIN_VARIANT (type);
451   if (type != t)
452     {
453       t = remap_type (t, id);
454       TYPE_MAIN_VARIANT (new_tree) = t;
455       TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
456       TYPE_NEXT_VARIANT (t) = new_tree;
457     }
458   else
459     {
460       TYPE_MAIN_VARIANT (new_tree) = new_tree;
461       TYPE_NEXT_VARIANT (new_tree) = NULL;
462     }
463 
464   if (TYPE_STUB_DECL (type))
465     TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
466 
467   /* Lazily create pointer and reference types.  */
468   TYPE_POINTER_TO (new_tree) = NULL;
469   TYPE_REFERENCE_TO (new_tree) = NULL;
470 
471   /* Copy all types that may contain references to local variables; be sure to
472      preserve sharing between the type and its main variant when possible.  */
473   switch (TREE_CODE (new_tree))
474     {
475     case INTEGER_TYPE:
476     case REAL_TYPE:
477     case FIXED_POINT_TYPE:
478     case ENUMERAL_TYPE:
479     case BOOLEAN_TYPE:
480       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
481 	{
482 	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
483 	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
484 
485 	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
486 	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
487 	}
488       else
489 	{
490 	  t = TYPE_MIN_VALUE (new_tree);
491 	  if (t && TREE_CODE (t) != INTEGER_CST)
492 	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
493 
494 	  t = TYPE_MAX_VALUE (new_tree);
495 	  if (t && TREE_CODE (t) != INTEGER_CST)
496 	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
497 	}
498       return new_tree;
499 
500     case FUNCTION_TYPE:
501       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
502 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
503 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
504       else
505         TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
506       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507 	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
508 	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
509       else
510         walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
511       return new_tree;
512 
513     case ARRAY_TYPE:
514       if (TYPE_MAIN_VARIANT (new_tree) != new_tree
515 	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
516 	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
517       else
518 	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
519 
520       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
521 	{
522 	  gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
523 	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
524 	}
525       else
526 	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
527       break;
528 
529     case RECORD_TYPE:
530     case UNION_TYPE:
531     case QUAL_UNION_TYPE:
532       if (TYPE_MAIN_VARIANT (type) != type
533 	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
534 	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
535       else
536 	{
537 	  tree f, nf = NULL;
538 
539 	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
540 	    {
541 	      t = remap_decl (f, id);
542 	      DECL_CONTEXT (t) = new_tree;
543 	      DECL_CHAIN (t) = nf;
544 	      nf = t;
545 	    }
546 	  TYPE_FIELDS (new_tree) = nreverse (nf);
547 	}
548       break;
549 
550     case OFFSET_TYPE:
551     default:
552       /* Shouldn't have been thought variable sized.  */
553       gcc_unreachable ();
554     }
555 
556   /* All variants of the type share the same size, so use the already remapped data.  */
557   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
558     {
559       tree s = TYPE_SIZE (type);
560       tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
561       tree su = TYPE_SIZE_UNIT (type);
562       tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
563       gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
564 			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
565 			   || s == mvs);
566       gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
567 			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
568 			   || su == mvsu);
569       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
570       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
571     }
572   else
573     {
574       walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
575       walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
576     }
577 
578   return new_tree;
579 }
580 
581 /* Helper function for remap_type_2, called through walk_tree.  */
582 
583 static tree
584 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
585 {
586   copy_body_data *id = (copy_body_data *) data;
587 
588   if (TYPE_P (*tp))
589     *walk_subtrees = 0;
590 
591   else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
592     return *tp;
593 
594   return NULL_TREE;
595 }
596 
597 /* Return true if TYPE needs to be remapped because remap_decl on any
598    needed embedded decl returns something other than that decl.  */
599 
600 static bool
601 remap_type_2 (tree type, copy_body_data *id)
602 {
603   tree t;
604 
605 #define RETURN_TRUE_IF_VAR(T) \
606   do								\
607     {								\
608       tree _t = (T);						\
609       if (_t)							\
610 	{							\
611 	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
612 	    return true;					\
613 	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
614 	      && walk_tree (&_t, remap_type_3, id, NULL))	\
615 	    return true;					\
616 	}							\
617     }								\
618   while (0)
619 
620   switch (TREE_CODE (type))
621     {
622     case POINTER_TYPE:
623     case REFERENCE_TYPE:
624     case FUNCTION_TYPE:
625     case METHOD_TYPE:
626       return remap_type_2 (TREE_TYPE (type), id);
627 
628     case INTEGER_TYPE:
629     case REAL_TYPE:
630     case FIXED_POINT_TYPE:
631     case ENUMERAL_TYPE:
632     case BOOLEAN_TYPE:
633       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
634       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
635       return false;
636 
637     case ARRAY_TYPE:
638       if (remap_type_2 (TREE_TYPE (type), id)
639 	  || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
640 	return true;
641       break;
642 
643     case RECORD_TYPE:
644     case UNION_TYPE:
645     case QUAL_UNION_TYPE:
646       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
647 	if (TREE_CODE (t) == FIELD_DECL)
648 	  {
649 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
650 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
651 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
652 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
653 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
654 	  }
655       break;
656 
657     default:
658       return false;
659     }
660 
661   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
662   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
663   return false;
664 #undef RETURN_TRUE_IF_VAR
665 }
666 
667 tree
668 remap_type (tree type, copy_body_data *id)
669 {
670   tree *node;
671   tree tmp;
672 
673   if (type == NULL)
674     return type;
675 
676   /* See if we have remapped this type.  */
677   node = id->decl_map->get (type);
678   if (node)
679     return *node;
680 
681   /* The type only needs remapping if it's variably modified.  */
682   if (! variably_modified_type_p (type, id->src_fn)
683       /* Don't remap if copy_decl method doesn't always return a new
684 	 decl and for all embedded decls returns the passed in decl.  */
685       || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
686     {
687       insert_decl_map (id, type, type);
688       return type;
689     }
690 
691   id->remapping_type_depth++;
692   tmp = remap_type_1 (type, id);
693   id->remapping_type_depth--;
694 
695   return tmp;
696 }
697 
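/* A typical case that needs remapping (sketch, made-up names): a VLA
   type such as "int[n]" in the source function, where "n" is a
   PARM_DECL.  Once "n" has been remapped in the copy, the array type's
   size expressions must be rebuilt to refer to the remapped "n".
   Constant-sized types are not variably modified and map to themselves.  */
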
698 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
699 
700 static bool
701 can_be_nonlocal (tree decl, copy_body_data *id)
702 {
703   /* We cannot duplicate function decls.  */
704   if (TREE_CODE (decl) == FUNCTION_DECL)
705     return true;
706 
707   /* Local static vars must be non-local or we get multiple declaration
708      problems.  */
709   if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
710     return true;
711 
712   return false;
713 }
714 
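/* E.g. (illustrative) a function-local "static int counter;" is not
   duplicated into the copy; remap_decls keeps the original decl, adds it
   to the caller's local decls, and records it in BLOCK_NONLOCAL_VARS so
   debug info can still refer to it.  */
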
715 static tree
716 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
717 	     copy_body_data *id)
718 {
719   tree old_var;
720   tree new_decls = NULL_TREE;
721 
722   /* Remap its variables.  */
723   for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
724     {
725       tree new_var;
726 
727       if (can_be_nonlocal (old_var, id))
728 	{
729 	  /* We need to add this variable to the local decls as otherwise
730 	     nothing else will do so.  */
731 	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
732 	    add_local_decl (cfun, old_var);
733 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
734 	      && !DECL_IGNORED_P (old_var)
735 	      && nonlocalized_list)
736 	    vec_safe_push (*nonlocalized_list, old_var);
737 	  continue;
738 	}
739 
740       /* Remap the variable.  */
741       new_var = remap_decl (old_var, id);
742 
743       /* If we didn't remap this variable, we can't mess with its
744 	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
745 	 already declared somewhere else, so don't declare it here.  */
746 
747       if (new_var == id->retvar)
748 	;
749       else if (!new_var)
750         {
751 	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
752 	      && !DECL_IGNORED_P (old_var)
753 	      && nonlocalized_list)
754 	    vec_safe_push (*nonlocalized_list, old_var);
755 	}
756       else
757 	{
758 	  gcc_assert (DECL_P (new_var));
759 	  DECL_CHAIN (new_var) = new_decls;
760 	  new_decls = new_var;
761 
762 	  /* Also copy value-expressions.  */
763 	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
764 	    {
765 	      tree tem = DECL_VALUE_EXPR (new_var);
766 	      bool old_regimplify = id->regimplify;
767 	      id->remapping_type_depth++;
768 	      walk_tree (&tem, copy_tree_body_r, id, NULL);
769 	      id->remapping_type_depth--;
770 	      id->regimplify = old_regimplify;
771 	      SET_DECL_VALUE_EXPR (new_var, tem);
772 	    }
773 	}
774     }
775 
776   return nreverse (new_decls);
777 }
778 
779 /* Copy the BLOCK to contain remapped versions of the variables
780    therein.  And hook the new block into the block-tree.  */
781 
782 static void
783 remap_block (tree *block, copy_body_data *id)
784 {
785   tree old_block;
786   tree new_block;
787 
788   /* Make the new block.  */
789   old_block = *block;
790   new_block = make_node (BLOCK);
791   TREE_USED (new_block) = TREE_USED (old_block);
792   BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
793   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
794   BLOCK_NONLOCALIZED_VARS (new_block)
795     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
796   *block = new_block;
797 
798   /* Remap its variables.  */
799   BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
800   					&BLOCK_NONLOCALIZED_VARS (new_block),
801 					id);
802 
803   if (id->transform_lang_insert_block)
804     id->transform_lang_insert_block (new_block);
805 
806   /* Remember the remapped block.  */
807   insert_decl_map (id, old_block, new_block);
808 }
809 
810 /* Copy the whole block tree and root it in id->block.  */
811 static tree
812 remap_blocks (tree block, copy_body_data *id)
813 {
814   tree t;
815   tree new_tree = block;
816 
817   if (!block)
818     return NULL;
819 
820   remap_block (&new_tree, id);
821   gcc_assert (new_tree != block);
822   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
823     prepend_lexical_block (new_tree, remap_blocks (t, id));
824   /* Blocks are in arbitrary order, but make things slightly prettier and do
825      not swap order when producing a copy.  */
826   BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
827   return new_tree;
828 }
829 
830 /* Remap the block tree rooted at BLOCK to nothing.  */
831 static void
832 remap_blocks_to_null (tree block, copy_body_data *id)
833 {
834   tree t;
835   insert_decl_map (id, block, NULL_TREE);
836   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
837     remap_blocks_to_null (t, id);
838 }
839 
840 static void
841 copy_statement_list (tree *tp)
842 {
843   tree_stmt_iterator oi, ni;
844   tree new_tree;
845 
846   new_tree = alloc_stmt_list ();
847   ni = tsi_start (new_tree);
848   oi = tsi_start (*tp);
849   TREE_TYPE (new_tree) = TREE_TYPE (*tp);
850   *tp = new_tree;
851 
852   for (; !tsi_end_p (oi); tsi_next (&oi))
853     {
854       tree stmt = tsi_stmt (oi);
855       if (TREE_CODE (stmt) == STATEMENT_LIST)
856 	/* This copy is not redundant; tsi_link_after will smash this
857 	   STATEMENT_LIST into the end of the one we're building, and we
858 	   don't want to do that with the original.  */
859 	copy_statement_list (&stmt);
860       tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
861     }
862 }
863 
864 static void
865 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
866 {
867   tree block = BIND_EXPR_BLOCK (*tp);
868   /* Copy (and replace) the statement.  */
869   copy_tree_r (tp, walk_subtrees, NULL);
870   if (block)
871     {
872       remap_block (&block, id);
873       BIND_EXPR_BLOCK (*tp) = block;
874     }
875 
876   if (BIND_EXPR_VARS (*tp))
877     /* This will remap a lot of the same decls again, but this should be
878        harmless.  */
879     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
880 }
881 
882 
883 /* Create a new gimple_seq by remapping all the statements in BODY
884    using the inlining information in ID.  */
885 
886 static gimple_seq
887 remap_gimple_seq (gimple_seq body, copy_body_data *id)
888 {
889   gimple_stmt_iterator si;
890   gimple_seq new_body = NULL;
891 
892   for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
893     {
894       gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
895       gimple_seq_add_seq (&new_body, new_stmts);
896     }
897 
898   return new_body;
899 }
900 
901 
902 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
903    block using the mapping information in ID.  */
904 
905 static gimple *
906 copy_gimple_bind (gbind *stmt, copy_body_data *id)
907 {
908   gimple *new_bind;
909   tree new_block, new_vars;
910   gimple_seq body, new_body;
911 
912   /* Copy the statement.  Note that we purposely don't use copy_stmt
913      here because we need to remap statements as we copy.  */
914   body = gimple_bind_body (stmt);
915   new_body = remap_gimple_seq (body, id);
916 
917   new_block = gimple_bind_block (stmt);
918   if (new_block)
919     remap_block (&new_block, id);
920 
921   /* This will remap a lot of the same decls again, but this should be
922      harmless.  */
923   new_vars = gimple_bind_vars (stmt);
924   if (new_vars)
925     new_vars = remap_decls (new_vars, NULL, id);
926 
927   new_bind = gimple_build_bind (new_vars, new_body, new_block);
928 
929   return new_bind;
930 }
931 
932 /* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */
933 
934 static bool
935 is_parm (tree decl)
936 {
937   if (TREE_CODE (decl) == SSA_NAME)
938     {
939       decl = SSA_NAME_VAR (decl);
940       if (!decl)
941 	return false;
942     }
943 
944   return (TREE_CODE (decl) == PARM_DECL);
945 }
946 
947 /* Remap the dependence CLIQUE from the source to the destination function
948    as specified in ID.  */
949 
950 static unsigned short
951 remap_dependence_clique (copy_body_data *id, unsigned short clique)
952 {
953   if (clique == 0 || processing_debug_stmt)
954     return 0;
955   if (!id->dependence_map)
956     id->dependence_map = new hash_map<dependence_hash, unsigned short>;
957   bool existed;
958   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
959   if (!existed)
960     {
961       /* Clique 1 is reserved for local ones set by PTA.  */
962       if (cfun->last_clique == 0)
963 	cfun->last_clique = 1;
964       newc = ++cfun->last_clique;
965     }
966   return newc;
967 }
968 
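/* As an illustrative example: if the source function used clique 2 for a
   set of restrict-derived references, all of those references get the
   same fresh clique number (roughly cfun->last_clique + 1) in the
   destination, so the clique/base disambiguation rules keep holding in
   the caller without clashing with the caller's own cliques.  */
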
969 /* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
970    'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
971    WALK_SUBTREES is used to tell walk_gimple_op whether to keep
972    recursing into the child nodes of *TP.  */
973 
974 static tree
975 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
976 {
977   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
978   copy_body_data *id = (copy_body_data *) wi_p->info;
979   tree fn = id->src_fn;
980 
981   /* For recursive invocations this is no longer the LHS itself.  */
982   bool is_lhs = wi_p->is_lhs;
983   wi_p->is_lhs = false;
984 
985   if (TREE_CODE (*tp) == SSA_NAME)
986     {
987       *tp = remap_ssa_name (*tp, id);
988       *walk_subtrees = 0;
989       if (is_lhs)
990 	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
991       return NULL;
992     }
993   else if (auto_var_in_fn_p (*tp, fn))
994     {
995       /* Local variables and labels need to be replaced by equivalent
996 	 variables.  We don't want to copy static variables; there's
997 	 only one of those, no matter how many times we inline the
998 	 containing function.  Similarly for globals from an outer
999 	 function.  */
1000       tree new_decl;
1001 
1002       /* Remap the declaration.  */
1003       new_decl = remap_decl (*tp, id);
1004       gcc_assert (new_decl);
1005       /* Replace this variable with the copy.  */
1006       STRIP_TYPE_NOPS (new_decl);
1007       /* ???  The C++ frontend uses void * pointer zero to initialize
1008          any other type.  This confuses the middle-end type verification.
1009 	 As cloned bodies do not go through gimplification again the fixup
1010 	 there doesn't trigger.  */
1011       if (TREE_CODE (new_decl) == INTEGER_CST
1012 	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1013 	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1014       *tp = new_decl;
1015       *walk_subtrees = 0;
1016     }
1017   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1018     gcc_unreachable ();
1019   else if (TREE_CODE (*tp) == SAVE_EXPR)
1020     gcc_unreachable ();
1021   else if (TREE_CODE (*tp) == LABEL_DECL
1022 	   && (!DECL_CONTEXT (*tp)
1023 	       || decl_function_context (*tp) == id->src_fn))
1024     /* These may need to be remapped for EH handling.  */
1025     *tp = remap_decl (*tp, id);
1026   else if (TREE_CODE (*tp) == FIELD_DECL)
1027     {
1028       /* If the enclosing record type is variably_modified_type_p, the field
1029 	 has already been remapped.  Otherwise, it need not be.  */
1030       tree *n = id->decl_map->get (*tp);
1031       if (n)
1032 	*tp = *n;
1033       *walk_subtrees = 0;
1034     }
1035   else if (TYPE_P (*tp))
1036     /* Types may need remapping as well.  */
1037     *tp = remap_type (*tp, id);
1038   else if (CONSTANT_CLASS_P (*tp))
1039     {
1040       /* If this is a constant, we have to copy the node iff the type
1041 	 will be remapped.  copy_tree_r will not copy a constant.  */
1042       tree new_type = remap_type (TREE_TYPE (*tp), id);
1043 
1044       if (new_type == TREE_TYPE (*tp))
1045 	*walk_subtrees = 0;
1046 
1047       else if (TREE_CODE (*tp) == INTEGER_CST)
1048 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1049       else
1050 	{
1051 	  *tp = copy_node (*tp);
1052 	  TREE_TYPE (*tp) = new_type;
1053 	}
1054     }
1055   else
1056     {
1057       /* Otherwise, just copy the node.  Note that copy_tree_r already
1058 	 knows not to copy VAR_DECLs, etc., so this is safe.  */
1059 
1060       if (TREE_CODE (*tp) == MEM_REF)
1061 	{
1062 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1063 	     that can happen when a pointer argument is an ADDR_EXPR.
1064 	     Recurse here manually to allow that.  */
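	  /* E.g. (sketch, made-up names): if the parameter P was substituted
	     with &a.b, the reference MEM[(T *)P + 8] is rebuilt via
	     fold_build2 below so the address computation can fold away and
	     the access can become a direct reference into "a".  */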
1065 	  tree ptr = TREE_OPERAND (*tp, 0);
1066 	  tree type = remap_type (TREE_TYPE (*tp), id);
1067 	  tree old = *tp;
1068 	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1069 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1070 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1071 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1072 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1073 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1074 	    {
1075 	      MR_DEPENDENCE_CLIQUE (*tp)
1076 	        = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1077 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1078 	    }
1079 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1080 	     remapped a parameter as the property might be valid only
1081 	     for the parameter itself.  */
1082 	  if (TREE_THIS_NOTRAP (old)
1083 	      && (!is_parm (TREE_OPERAND (old, 0))
1084 		  || (!id->transform_parameter && is_parm (ptr))))
1085 	    TREE_THIS_NOTRAP (*tp) = 1;
1086 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1087 	  *walk_subtrees = 0;
1088 	  return NULL;
1089 	}
1090 
1091       /* Here is the "usual case".  Copy this tree node, and then
1092 	 tweak some special cases.  */
1093       copy_tree_r (tp, walk_subtrees, NULL);
1094 
1095       if (TREE_CODE (*tp) != OMP_CLAUSE)
1096 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1097 
1098       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1099 	{
1100 	  /* The copied TARGET_EXPR has never been expanded, even if the
1101 	     original node was expanded already.  */
1102 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1103 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1104 	}
1105       else if (TREE_CODE (*tp) == ADDR_EXPR)
1106 	{
1107 	  /* Variable substitution need not be simple.  In particular,
1108 	     the MEM_REF substitution above.  Make sure that
1109 	     TREE_CONSTANT and friends are up-to-date.  */
1110 	  int invariant = is_gimple_min_invariant (*tp);
1111 	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1112 	  recompute_tree_invariant_for_addr_expr (*tp);
1113 
1114 	  /* If this used to be invariant, but is not any longer,
1115 	     then regimplification is probably needed.  */
1116 	  if (invariant && !is_gimple_min_invariant (*tp))
1117 	    id->regimplify = true;
1118 
1119 	  *walk_subtrees = 0;
1120 	}
1121     }
1122 
1123   /* Update the TREE_BLOCK for the cloned expr.  */
1124   if (EXPR_P (*tp))
1125     {
1126       tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1127       tree old_block = TREE_BLOCK (*tp);
1128       if (old_block)
1129 	{
1130 	  tree *n;
1131 	  n = id->decl_map->get (TREE_BLOCK (*tp));
1132 	  if (n)
1133 	    new_block = *n;
1134 	}
1135       TREE_SET_BLOCK (*tp, new_block);
1136     }
1137 
1138   /* Keep iterating.  */
1139   return NULL_TREE;
1140 }
1141 
1142 
1143 /* Called from copy_body_id via walk_tree.  DATA is really a
1144    `copy_body_data *'.  */
1145 
1146 tree
1147 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1148 {
1149   copy_body_data *id = (copy_body_data *) data;
1150   tree fn = id->src_fn;
1151   tree new_block;
1152 
1153   /* Begin by recognizing trees that we'll completely rewrite for the
1154      inlining context.  Our output for these trees is completely
1155      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1156      into an edge).  Further down, we'll handle trees that get
1157      duplicated and/or tweaked.  */
1158 
1159   /* When requested, RETURN_EXPRs should be transformed to just the
1160      contained MODIFY_EXPR.  The branch semantics of the return will
1161      be handled elsewhere by manipulating the CFG rather than a statement.  */
1162   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1163     {
1164       tree assignment = TREE_OPERAND (*tp, 0);
1165 
1166       /* If we're returning something, just turn that into an
1167 	 assignment into the equivalent of the original RESULT_DECL.
1168 	 If the "assignment" is just the result decl, the result
1169 	 decl has already been set (e.g. a recent "foo (&result_decl,
1170 	 ...)"); just toss the entire RETURN_EXPR.  */
1171       if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1172 	{
1173 	  /* Replace the RETURN_EXPR with (a copy of) the
1174 	     MODIFY_EXPR hanging underneath.  */
1175 	  *tp = copy_node (assignment);
1176 	}
1177       else /* Else the RETURN_EXPR returns no value.  */
1178 	{
1179 	  *tp = NULL;
1180 	  return (tree) (void *)1;
1181 	}
1182     }
1183   else if (TREE_CODE (*tp) == SSA_NAME)
1184     {
1185       *tp = remap_ssa_name (*tp, id);
1186       *walk_subtrees = 0;
1187       return NULL;
1188     }
1189 
1190   /* Local variables and labels need to be replaced by equivalent
1191      variables.  We don't want to copy static variables; there's only
1192      one of those, no matter how many times we inline the containing
1193      function.  Similarly for globals from an outer function.  */
1194   else if (auto_var_in_fn_p (*tp, fn))
1195     {
1196       tree new_decl;
1197 
1198       /* Remap the declaration.  */
1199       new_decl = remap_decl (*tp, id);
1200       gcc_assert (new_decl);
1201       /* Replace this variable with the copy.  */
1202       STRIP_TYPE_NOPS (new_decl);
1203       *tp = new_decl;
1204       *walk_subtrees = 0;
1205     }
1206   else if (TREE_CODE (*tp) == STATEMENT_LIST)
1207     copy_statement_list (tp);
1208   else if (TREE_CODE (*tp) == SAVE_EXPR
1209 	   || TREE_CODE (*tp) == TARGET_EXPR)
1210     remap_save_expr (tp, id->decl_map, walk_subtrees);
1211   else if (TREE_CODE (*tp) == LABEL_DECL
1212 	   && (! DECL_CONTEXT (*tp)
1213 	       || decl_function_context (*tp) == id->src_fn))
1214     /* These may need to be remapped for EH handling.  */
1215     *tp = remap_decl (*tp, id);
1216   else if (TREE_CODE (*tp) == BIND_EXPR)
1217     copy_bind_expr (tp, walk_subtrees, id);
1218   /* Types may need remapping as well.  */
1219   else if (TYPE_P (*tp))
1220     *tp = remap_type (*tp, id);
1221 
1222   /* If this is a constant, we have to copy the node iff the type will be
1223      remapped.  copy_tree_r will not copy a constant.  */
1224   else if (CONSTANT_CLASS_P (*tp))
1225     {
1226       tree new_type = remap_type (TREE_TYPE (*tp), id);
1227 
1228       if (new_type == TREE_TYPE (*tp))
1229 	*walk_subtrees = 0;
1230 
1231       else if (TREE_CODE (*tp) == INTEGER_CST)
1232 	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1233       else
1234 	{
1235 	  *tp = copy_node (*tp);
1236 	  TREE_TYPE (*tp) = new_type;
1237 	}
1238     }
1239 
1240   /* Otherwise, just copy the node.  Note that copy_tree_r already
1241      knows not to copy VAR_DECLs, etc., so this is safe.  */
1242   else
1243     {
1244       /* Here we handle trees that are not completely rewritten.
1245 	 First we detect some inlining-induced bogosities for
1246 	 discarding.  */
1247       if (TREE_CODE (*tp) == MODIFY_EXPR
1248 	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1249 	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1250 	{
1251 	  /* Some assignments VAR = VAR; don't generate any rtl code
1252 	     and thus don't count as variable modification.  Avoid
1253 	     keeping bogosities like 0 = 0.  */
1254 	  tree decl = TREE_OPERAND (*tp, 0), value;
1255 	  tree *n;
1256 
1257 	  n = id->decl_map->get (decl);
1258 	  if (n)
1259 	    {
1260 	      value = *n;
1261 	      STRIP_TYPE_NOPS (value);
1262 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1263 		{
1264 		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1265 		  return copy_tree_body_r (tp, walk_subtrees, data);
1266 		}
1267 	    }
1268 	}
1269       else if (TREE_CODE (*tp) == INDIRECT_REF)
1270 	{
1271 	  /* Get rid of *& from inline substitutions that can happen when a
1272 	     pointer argument is an ADDR_EXPR.  */
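	  /* Illustrative case: the callee dereferences its parameter as
	     "*p"; after substituting the actual argument "&x" for "p", the
	     copied expression "*&x" is folded back to plain "x" below.  */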
1273 	  tree decl = TREE_OPERAND (*tp, 0);
1274 	  tree *n = id->decl_map->get (decl);
1275 	  if (n)
1276 	    {
1277 	      /* If we happen to get an ADDR_EXPR in n->value, strip
1278 	         it manually here as we'll eventually get ADDR_EXPRs
1279 		 which lie about their types pointed to.  In this case
1280 		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1281 		 but we absolutely rely on that.  As fold_indirect_ref
1282 	         does other useful transformations, try that first, though.  */
1283 	      tree type = TREE_TYPE (*tp);
1284 	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1285 	      tree old = *tp;
1286 	      *tp = gimple_fold_indirect_ref (ptr);
1287 	      if (! *tp)
1288 	        {
1289 		  type = remap_type (type, id);
1290 		  if (TREE_CODE (ptr) == ADDR_EXPR)
1291 		    {
1292 		      *tp
1293 		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1294 		      /* ???  We should either assert here or build
1295 			 a VIEW_CONVERT_EXPR instead of blindly leaking
1296 			 incompatible types to our IL.  */
1297 		      if (! *tp)
1298 			*tp = TREE_OPERAND (ptr, 0);
1299 		    }
1300 	          else
1301 		    {
1302 	              *tp = build1 (INDIRECT_REF, type, ptr);
1303 		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1304 		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1305 		      TREE_READONLY (*tp) = TREE_READONLY (old);
1306 		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1307 			 have remapped a parameter as the property might be
1308 			 valid only for the parameter itself.  */
1309 		      if (TREE_THIS_NOTRAP (old)
1310 			  && (!is_parm (TREE_OPERAND (old, 0))
1311 			      || (!id->transform_parameter && is_parm (ptr))))
1312 		        TREE_THIS_NOTRAP (*tp) = 1;
1313 		    }
1314 		}
1315 	      *walk_subtrees = 0;
1316 	      return NULL;
1317 	    }
1318 	}
1319       else if (TREE_CODE (*tp) == MEM_REF)
1320 	{
1321 	  /* We need to re-canonicalize MEM_REFs from inline substitutions
1322 	     that can happen when a pointer argument is an ADDR_EXPR.
1323 	     Recurse here manually to allow that.  */
1324 	  tree ptr = TREE_OPERAND (*tp, 0);
1325 	  tree type = remap_type (TREE_TYPE (*tp), id);
1326 	  tree old = *tp;
1327 	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
1328 	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1329 	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1330 	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1331 	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1332 	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
1333 	    {
1334 	      MR_DEPENDENCE_CLIQUE (*tp)
1335 		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1336 	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1337 	    }
1338 	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1339 	     remapped a parameter as the property might be valid only
1340 	     for the parameter itself.  */
1341 	  if (TREE_THIS_NOTRAP (old)
1342 	      && (!is_parm (TREE_OPERAND (old, 0))
1343 		  || (!id->transform_parameter && is_parm (ptr))))
1344 	    TREE_THIS_NOTRAP (*tp) = 1;
1345 	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1346 	  *walk_subtrees = 0;
1347 	  return NULL;
1348 	}
1349 
1350       /* Here is the "usual case".  Copy this tree node, and then
1351 	 tweak some special cases.  */
1352       copy_tree_r (tp, walk_subtrees, NULL);
1353 
1354       /* If EXPR has a block defined, map it to the newly constructed block.
1355          When inlining we want EXPRs without a block to appear in the block
1356 	 of the function call if we are not remapping a type.  */
1357       if (EXPR_P (*tp))
1358 	{
1359 	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1360 	  if (TREE_BLOCK (*tp))
1361 	    {
1362 	      tree *n;
1363 	      n = id->decl_map->get (TREE_BLOCK (*tp));
1364 	      if (n)
1365 		new_block = *n;
1366 	    }
1367 	  TREE_SET_BLOCK (*tp, new_block);
1368 	}
1369 
1370       if (TREE_CODE (*tp) != OMP_CLAUSE)
1371 	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1372 
1373       /* The copied TARGET_EXPR has never been expanded, even if the
1374 	 original node was expanded already.  */
1375       if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1376 	{
1377 	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1378 	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1379 	}
1380 
1381       /* Variable substitution need not be simple.  In particular, the
1382 	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1383 	 and friends are up-to-date.  */
1384       else if (TREE_CODE (*tp) == ADDR_EXPR)
1385 	{
1386 	  int invariant = is_gimple_min_invariant (*tp);
1387 	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1388 
1389 	  /* Handle the case where we substituted an INDIRECT_REF
1390 	     into the operand of the ADDR_EXPR.  */
1391 	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1392 	    {
1393 	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1394 	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
1395 		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1396 	      *tp = t;
1397 	    }
1398 	  else
1399 	    recompute_tree_invariant_for_addr_expr (*tp);
1400 
1401 	  /* If this used to be invariant, but is not any longer,
1402 	     then regimplification is probably needed.  */
1403 	  if (invariant && !is_gimple_min_invariant (*tp))
1404 	    id->regimplify = true;
1405 
1406 	  *walk_subtrees = 0;
1407 	}
1408     }
1409 
1410   /* Keep iterating.  */
1411   return NULL_TREE;
1412 }
1413 
1414 /* Helper for remap_gimple_stmt.  Given an EH region number for the
1415    source function, map that to the duplicate EH region number in
1416    the destination function.  */
1417 
1418 static int
1419 remap_eh_region_nr (int old_nr, copy_body_data *id)
1420 {
1421   eh_region old_r, new_r;
1422 
1423   old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1424   new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1425 
1426   return new_r->index;
1427 }
1428 
1429 /* Similar, but operate on INTEGER_CSTs.  */
1430 
1431 static tree
1432 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1433 {
1434   int old_nr, new_nr;
1435 
1436   old_nr = tree_to_shwi (old_t_nr);
1437   new_nr = remap_eh_region_nr (old_nr, id);
1438 
1439   return build_int_cst (integer_type_node, new_nr);
1440 }
1441 
1442 /* Helper for copy_bb.  Remap statement STMT using the inlining
1443    information in ID.  Return the new statement copy.  */
1444 
1445 static gimple_seq
1446 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1447 {
1448   gimple *copy = NULL;
1449   struct walk_stmt_info wi;
1450   bool skip_first = false;
1451   gimple_seq stmts = NULL;
1452 
1453   if (is_gimple_debug (stmt)
1454       && (gimple_debug_nonbind_marker_p (stmt)
1455 	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1456 	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1457     return stmts;
1458 
1459   /* Begin by recognizing trees that we'll completely rewrite for the
1460      inlining context.  Our output for these trees is completely
1461      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1462      into an edge).  Further down, we'll handle trees that get
1463      duplicated and/or tweaked.  */
1464 
1465   /* When requested, GIMPLE_RETURNs should be transformed to just the
1466      contained GIMPLE_ASSIGN.  The branch semantics of the return will
1467      be handled elsewhere by manipulating the CFG rather than the
1468      statement.  */
1469   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1470     {
1471       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1472       tree retbnd = gimple_return_retbnd (stmt);
1473       tree bndslot = id->retbnd;
1474 
1475       if (retbnd && bndslot)
1476 	{
1477 	  gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
1478 	  memset (&wi, 0, sizeof (wi));
1479 	  wi.info = id;
1480 	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1481 	  gimple_seq_add_stmt (&stmts, bndcopy);
1482 	}
1483 
1484       /* If we're returning something, just turn that into an
1485 	 assignment into the equivalent of the original RESULT_DECL.
1486 	 If RETVAL is just the result decl, the result decl has
1487 	 already been set (e.g. a recent "foo (&result_decl, ...)");
1488 	 just toss the entire GIMPLE_RETURN.  */
1489       if (retval
1490 	  && (TREE_CODE (retval) != RESULT_DECL
1491 	      && (TREE_CODE (retval) != SSA_NAME
1492 		  || ! SSA_NAME_VAR (retval)
1493 		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1494         {
1495 	  copy = gimple_build_assign (id->do_not_unshare
1496 				      ? id->retvar : unshare_expr (id->retvar),
1497 				      retval);
1498 	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1499 	  skip_first = true;
1500 
1501 	  /* We need to copy bounds if we return a structure with pointers into
1502 	     an instrumented function.  */
1503 	  if (chkp_function_instrumented_p (id->dst_fn)
1504 	      && !bndslot
1505 	      && !BOUNDED_P (id->retvar)
1506 	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1507 	    id->assign_stmts.safe_push (copy);
1508 
1509 	}
1510       else
1511 	return stmts;
1512     }
1513   else if (gimple_has_substatements (stmt))
1514     {
1515       gimple_seq s1, s2;
1516 
1517       /* When cloning bodies from the C++ front end, we will be handed bodies
1518 	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1519 	 have embedded statements.  */
1520       switch (gimple_code (stmt))
1521 	{
1522 	case GIMPLE_BIND:
1523 	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1524 	  break;
1525 
1526 	case GIMPLE_CATCH:
1527 	  {
1528 	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
1529 	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1530 	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1531 	  }
1532 	  break;
1533 
1534 	case GIMPLE_EH_FILTER:
1535 	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1536 	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1537 	  break;
1538 
1539 	case GIMPLE_TRY:
1540 	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1541 	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1542 	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1543 	  break;
1544 
1545 	case GIMPLE_WITH_CLEANUP_EXPR:
1546 	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1547 	  copy = gimple_build_wce (s1);
1548 	  break;
1549 
1550 	case GIMPLE_OMP_PARALLEL:
1551 	  {
1552 	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1553 	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1554 	    copy = gimple_build_omp_parallel
1555 	             (s1,
1556 		      gimple_omp_parallel_clauses (omp_par_stmt),
1557 		      gimple_omp_parallel_child_fn (omp_par_stmt),
1558 		      gimple_omp_parallel_data_arg (omp_par_stmt));
1559 	  }
1560 	  break;
1561 
1562 	case GIMPLE_OMP_TASK:
1563 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1564 	  copy = gimple_build_omp_task
1565 	           (s1,
1566 		    gimple_omp_task_clauses (stmt),
1567 		    gimple_omp_task_child_fn (stmt),
1568 		    gimple_omp_task_data_arg (stmt),
1569 		    gimple_omp_task_copy_fn (stmt),
1570 		    gimple_omp_task_arg_size (stmt),
1571 		    gimple_omp_task_arg_align (stmt));
1572 	  break;
1573 
1574 	case GIMPLE_OMP_FOR:
1575 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1576 	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1577 	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1578 				       gimple_omp_for_clauses (stmt),
1579 				       gimple_omp_for_collapse (stmt), s2);
1580 	  {
1581 	    size_t i;
1582 	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1583 	      {
1584 		gimple_omp_for_set_index (copy, i,
1585 					  gimple_omp_for_index (stmt, i));
1586 		gimple_omp_for_set_initial (copy, i,
1587 					    gimple_omp_for_initial (stmt, i));
1588 		gimple_omp_for_set_final (copy, i,
1589 					  gimple_omp_for_final (stmt, i));
1590 		gimple_omp_for_set_incr (copy, i,
1591 					 gimple_omp_for_incr (stmt, i));
1592 		gimple_omp_for_set_cond (copy, i,
1593 					 gimple_omp_for_cond (stmt, i));
1594 	      }
1595 	  }
1596 	  break;
1597 
1598 	case GIMPLE_OMP_MASTER:
1599 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1600 	  copy = gimple_build_omp_master (s1);
1601 	  break;
1602 
1603 	case GIMPLE_OMP_TASKGROUP:
1604 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1605 	  copy = gimple_build_omp_taskgroup (s1);
1606 	  break;
1607 
1608 	case GIMPLE_OMP_ORDERED:
1609 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1610 	  copy = gimple_build_omp_ordered
1611 		   (s1,
1612 		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1613 	  break;
1614 
1615 	case GIMPLE_OMP_SECTION:
1616 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1617 	  copy = gimple_build_omp_section (s1);
1618 	  break;
1619 
1620 	case GIMPLE_OMP_SECTIONS:
1621 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1622 	  copy = gimple_build_omp_sections
1623 	           (s1, gimple_omp_sections_clauses (stmt));
1624 	  break;
1625 
1626 	case GIMPLE_OMP_SINGLE:
1627 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1628 	  copy = gimple_build_omp_single
1629 	           (s1, gimple_omp_single_clauses (stmt));
1630 	  break;
1631 
1632 	case GIMPLE_OMP_TARGET:
1633 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1634 	  copy = gimple_build_omp_target
1635 		   (s1, gimple_omp_target_kind (stmt),
1636 		    gimple_omp_target_clauses (stmt));
1637 	  break;
1638 
1639 	case GIMPLE_OMP_TEAMS:
1640 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1641 	  copy = gimple_build_omp_teams
1642 		   (s1, gimple_omp_teams_clauses (stmt));
1643 	  break;
1644 
1645 	case GIMPLE_OMP_CRITICAL:
1646 	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1647 	  copy = gimple_build_omp_critical (s1,
1648 					    gimple_omp_critical_name
1649 					      (as_a <gomp_critical *> (stmt)),
1650 					    gimple_omp_critical_clauses
1651 					      (as_a <gomp_critical *> (stmt)));
1652 	  break;
1653 
1654 	case GIMPLE_TRANSACTION:
1655 	  {
1656 	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1657 	    gtransaction *new_trans_stmt;
1658 	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1659 				   id);
1660 	    copy = new_trans_stmt = gimple_build_transaction (s1);
1661 	    gimple_transaction_set_subcode (new_trans_stmt,
1662 	      gimple_transaction_subcode (old_trans_stmt));
1663 	    gimple_transaction_set_label_norm (new_trans_stmt,
1664 	      gimple_transaction_label_norm (old_trans_stmt));
1665 	    gimple_transaction_set_label_uninst (new_trans_stmt,
1666 	      gimple_transaction_label_uninst (old_trans_stmt));
1667 	    gimple_transaction_set_label_over (new_trans_stmt,
1668 	      gimple_transaction_label_over (old_trans_stmt));
1669 	  }
1670 	  break;
1671 
1672 	default:
1673 	  gcc_unreachable ();
1674 	}
1675     }
1676   else
1677     {
1678       if (gimple_assign_copy_p (stmt)
1679 	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1680 	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1681 	{
1682 	  /* Here we handle statements that are not completely rewritten.
1683 	     First we detect some inlining-induced bogosities for
1684 	     discarding.  */
1685 
1686 	  /* Some assignments VAR = VAR; don't generate any rtl code
1687 	     and thus don't count as variable modification.  Avoid
1688 	     keeping bogosities like 0 = 0.  */
1689 	  tree decl = gimple_assign_lhs (stmt), value;
1690 	  tree *n;
1691 
1692 	  n = id->decl_map->get (decl);
1693 	  if (n)
1694 	    {
1695 	      value = *n;
1696 	      STRIP_TYPE_NOPS (value);
1697 	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1698 		return NULL;
1699 	    }
1700 	}
1701 
1702       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1703 	 in a block that we aren't copying during tree_function_versioning,
1704 	 just drop the clobber stmt.  */
1705       if (id->blocks_to_copy && gimple_clobber_p (stmt))
1706 	{
1707 	  tree lhs = gimple_assign_lhs (stmt);
1708 	  if (TREE_CODE (lhs) == MEM_REF
1709 	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1710 	    {
1711 	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1712 	      if (gimple_bb (def_stmt)
1713 		  && !bitmap_bit_p (id->blocks_to_copy,
1714 				    gimple_bb (def_stmt)->index))
1715 		return NULL;
1716 	    }
1717 	}
1718 
1719       if (gimple_debug_bind_p (stmt))
1720 	{
1721 	  gdebug *copy
1722 	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1723 				       gimple_debug_bind_get_value (stmt),
1724 				       stmt);
1725 	  id->debug_stmts.safe_push (copy);
1726 	  gimple_seq_add_stmt (&stmts, copy);
1727 	  return stmts;
1728 	}
1729       if (gimple_debug_source_bind_p (stmt))
1730 	{
1731 	  gdebug *copy = gimple_build_debug_source_bind
1732 	                   (gimple_debug_source_bind_get_var (stmt),
1733 			    gimple_debug_source_bind_get_value (stmt),
1734 			    stmt);
1735 	  id->debug_stmts.safe_push (copy);
1736 	  gimple_seq_add_stmt (&stmts, copy);
1737 	  return stmts;
1738 	}
1739       if (gimple_debug_nonbind_marker_p (stmt))
1740 	{
1741 	  /* If the inlined function has too many debug markers,
1742 	     don't copy them.  */
1743 	  if (id->src_cfun->debug_marker_count
1744 	      > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1745 	    return stmts;
1746 
1747 	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1748 	  id->debug_stmts.safe_push (copy);
1749 	  gimple_seq_add_stmt (&stmts, copy);
1750 	  return stmts;
1751 	}
1752       gcc_checking_assert (!is_gimple_debug (stmt));
1753 
1754       /* Create a new deep copy of the statement.  */
1755       copy = gimple_copy (stmt);
1756 
1757       /* Clear flags that need revisiting.  */
1758       if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1759         {
1760 	  if (gimple_call_tail_p (call_stmt))
1761 	    gimple_call_set_tail (call_stmt, false);
1762 	  if (gimple_call_from_thunk_p (call_stmt))
1763 	    gimple_call_set_from_thunk (call_stmt, false);
1764 	  if (gimple_call_internal_p (call_stmt))
1765 	    switch (gimple_call_internal_fn (call_stmt))
1766 	      {
1767 	      case IFN_GOMP_SIMD_LANE:
1768 	      case IFN_GOMP_SIMD_VF:
1769 	      case IFN_GOMP_SIMD_LAST_LANE:
1770 	      case IFN_GOMP_SIMD_ORDERED_START:
1771 	      case IFN_GOMP_SIMD_ORDERED_END:
1772 		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1773 	        break;
1774 	      default:
1775 		break;
1776 	      }
1777 	}
1778 
1779       /* Remap the region numbers for __builtin_eh_{pointer,filter},
1780 	 RESX and EH_DISPATCH.  */
1781       if (id->eh_map)
1782 	switch (gimple_code (copy))
1783 	  {
1784 	  case GIMPLE_CALL:
1785 	    {
1786 	      tree r, fndecl = gimple_call_fndecl (copy);
1787 	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1788 		switch (DECL_FUNCTION_CODE (fndecl))
1789 		  {
1790 		  case BUILT_IN_EH_COPY_VALUES:
1791 		    r = gimple_call_arg (copy, 1);
1792 		    r = remap_eh_region_tree_nr (r, id);
1793 		    gimple_call_set_arg (copy, 1, r);
1794 		    /* FALLTHRU */
1795 
1796 		  case BUILT_IN_EH_POINTER:
1797 		  case BUILT_IN_EH_FILTER:
1798 		    r = gimple_call_arg (copy, 0);
1799 		    r = remap_eh_region_tree_nr (r, id);
1800 		    gimple_call_set_arg (copy, 0, r);
1801 		    break;
1802 
1803 		  default:
1804 		    break;
1805 		  }
1806 
1807 	      /* Reset alias info if we didn't apply measures to
1808 		 keep it valid over inlining by setting DECL_PT_UID.  */
1809 	      if (!id->src_cfun->gimple_df
1810 		  || !id->src_cfun->gimple_df->ipa_pta)
1811 		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1812 	    }
1813 	    break;
1814 
1815 	  case GIMPLE_RESX:
1816 	    {
1817 	      gresx *resx_stmt = as_a <gresx *> (copy);
1818 	      int r = gimple_resx_region (resx_stmt);
1819 	      r = remap_eh_region_nr (r, id);
1820 	      gimple_resx_set_region (resx_stmt, r);
1821 	    }
1822 	    break;
1823 
1824 	  case GIMPLE_EH_DISPATCH:
1825 	    {
1826 	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1827 	      int r = gimple_eh_dispatch_region (eh_dispatch);
1828 	      r = remap_eh_region_nr (r, id);
1829 	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1830 	    }
1831 	    break;
1832 
1833 	  default:
1834 	    break;
1835 	  }
1836     }
1837 
1838   /* If STMT has a block defined, map it to the newly constructed
1839      block.  */
1840   if (gimple_block (copy))
1841     {
1842       tree *n;
1843       n = id->decl_map->get (gimple_block (copy));
1844       gcc_assert (n);
1845       gimple_set_block (copy, *n);
1846     }
1847 
1848   if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy)
1849       || gimple_debug_nonbind_marker_p (copy))
1850     {
1851       gimple_seq_add_stmt (&stmts, copy);
1852       return stmts;
1853     }
1854 
1855   /* Remap all the operands in COPY.  */
1856   memset (&wi, 0, sizeof (wi));
1857   wi.info = id;
1858   if (skip_first)
1859     walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1860   else
1861     walk_gimple_op (copy, remap_gimple_op_r, &wi);
1862 
1863   /* Clear the copied virtual operands.  We are not remapping them here
1864      but are going to recreate them from scratch.  */
1865   if (gimple_has_mem_ops (copy))
1866     {
1867       gimple_set_vdef (copy, NULL_TREE);
1868       gimple_set_vuse (copy, NULL_TREE);
1869     }
1870 
1871   gimple_seq_add_stmt (&stmts, copy);
1872   return stmts;
1873 }
1874 
1875 
1876 /* Copy basic block, scale profile accordingly.  Edges will be taken care of
1877    later.  */
1878 
1879 static basic_block
1880 copy_bb (copy_body_data *id, basic_block bb,
1881          profile_count num, profile_count den)
1882 {
1883   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1884   basic_block copy_basic_block;
1885   tree decl;
1886   basic_block prev;
1887 
1888   profile_count::adjust_for_ipa_scaling (&num, &den);
1889 
1890   /* Search for previous copied basic block.  */
1891   prev = bb->prev_bb;
1892   while (!prev->aux)
1893     prev = prev->prev_bb;
1894 
1895   /* create_basic_block() will append every new block to
1896      basic_block_info automatically.  */
1897   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1898   copy_basic_block->count = bb->count.apply_scale (num, den);
1899 
1900   copy_gsi = gsi_start_bb (copy_basic_block);
1901 
1902   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1903     {
1904       gimple_seq stmts;
1905       gimple *stmt = gsi_stmt (gsi);
1906       gimple *orig_stmt = stmt;
1907       gimple_stmt_iterator stmts_gsi;
1908       bool stmt_added = false;
1909 
1910       id->regimplify = false;
1911       stmts = remap_gimple_stmt (stmt, id);
1912 
1913       if (gimple_seq_empty_p (stmts))
1914 	continue;
1915 
1916       seq_gsi = copy_gsi;
1917 
1918       for (stmts_gsi = gsi_start (stmts);
1919 	   !gsi_end_p (stmts_gsi); )
1920 	{
1921 	  stmt = gsi_stmt (stmts_gsi);
1922 
1923 	  /* Advance iterator now before stmt is moved to seq_gsi.  */
1924 	  gsi_next (&stmts_gsi);
1925 
1926 	  if (gimple_nop_p (stmt))
1927 	      continue;
1928 
1929 	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1930 					    orig_stmt);
1931 
1932 	  /* With return slot optimization we can end up with
1933 	     non-gimple (foo *)&this->m, fix that here.  */
1934 	  if (is_gimple_assign (stmt)
1935 	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1936 	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1937 	    {
1938 	      tree new_rhs;
1939 	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
1940 						  gimple_assign_rhs1 (stmt),
1941 						  true, NULL, false,
1942 						  GSI_CONTINUE_LINKING);
1943 	      gimple_assign_set_rhs1 (stmt, new_rhs);
1944 	      id->regimplify = false;
1945 	    }
1946 
1947 	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1948 
1949 	  if (id->regimplify)
1950 	    gimple_regimplify_operands (stmt, &seq_gsi);
1951 
1952 	  stmt_added = true;
1953 	}
1954 
1955       if (!stmt_added)
1956 	continue;
1957 
1958       /* If copy_basic_block has been empty at the start of this iteration,
1959 	 call gsi_start_bb again to get at the newly added statements.  */
1960       if (gsi_end_p (copy_gsi))
1961 	copy_gsi = gsi_start_bb (copy_basic_block);
1962       else
1963 	gsi_next (&copy_gsi);
1964 
1965       /* Process the new statement.  The call to gimple_regimplify_operands
1966 	 possibly turned the statement into multiple statements; we
1967 	 need to process all of them.  */
1968       do
1969 	{
1970 	  tree fn;
1971 	  gcall *call_stmt;
1972 
1973 	  stmt = gsi_stmt (copy_gsi);
1974 	  call_stmt = dyn_cast <gcall *> (stmt);
1975 	  if (call_stmt
1976 	      && gimple_call_va_arg_pack_p (call_stmt)
1977 	      && id->call_stmt
1978 	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
1979 	    {
1980 	      /* __builtin_va_arg_pack () should be replaced by
1981 		 all arguments corresponding to ... in the caller.  */
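	      /* Illustrative example (hypothetical names): if the callee
		 body contains "bar (1, __builtin_va_arg_pack ())" and the
		 call being inlined is "callee (x, 5, 6, 7)", where "x"
		 binds the only named parameter, the copied call becomes
		 "bar (1, 5, 6, 7)".  */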
1982 	      tree p;
1983 	      gcall *new_call;
1984 	      vec<tree> argarray;
1985 	      size_t nargs = gimple_call_num_args (id->call_stmt);
1986 	      size_t n, i, nargs_to_copy;
1987 	      bool remove_bounds = false;
1988 
1989 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1990 		nargs--;
1991 
1992 	      /* Bounds should be removed from the arg pack in case
1993 		 we handle a non-instrumented call in an instrumented
1994 		 function.  */
1995 	      nargs_to_copy = nargs;
1996 	      if (gimple_call_with_bounds_p (id->call_stmt)
1997 		  && !gimple_call_with_bounds_p (stmt))
1998 		{
1999 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
2000 		       i < gimple_call_num_args (id->call_stmt);
2001 		       i++)
2002 		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
2003 		      nargs_to_copy--;
2004 		  remove_bounds = true;
2005 		}
2006 
2007 	      /* Create the new array of arguments.  */
2008 	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
2009 	      argarray.create (n);
2010 	      argarray.safe_grow_cleared (n);
2011 
2012 	      /* Copy all the arguments before '...'  */
2013 	      memcpy (argarray.address (),
2014 		      gimple_call_arg_ptr (call_stmt, 0),
2015 		      gimple_call_num_args (call_stmt) * sizeof (tree));
2016 
2017 	      if (remove_bounds)
2018 		{
2019 		  /* Append the rest of the arguments, removing bounds.  */
2020 		  unsigned cur = gimple_call_num_args (call_stmt);
2022 		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
2023 		       i < gimple_call_num_args (id->call_stmt);
2024 		       i++)
2025 		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
2026 		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
2027 		  gcc_assert (cur == n);
2028 		}
2029 	      else
2030 		{
2031 		  /* Append the arguments passed in '...'  */
2032 		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2033 			  gimple_call_arg_ptr (id->call_stmt, 0)
2034 			  + (gimple_call_num_args (id->call_stmt) - nargs),
2035 			  nargs * sizeof (tree));
2036 		}
2037 
2038 	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2039 						argarray);
2040 
2041 	      argarray.release ();
2042 
2043 	      /* Copy all GIMPLE_CALL flags, location and block, except
2044 		 GF_CALL_VA_ARG_PACK.  */
2045 	      gimple_call_copy_flags (new_call, call_stmt);
2046 	      gimple_call_set_va_arg_pack (new_call, false);
2047 	      gimple_set_location (new_call, gimple_location (stmt));
2048 	      gimple_set_block (new_call, gimple_block (stmt));
2049 	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2050 
2051 	      gsi_replace (&copy_gsi, new_call, false);
2052 	      stmt = new_call;
2053 	    }
2054 	  else if (call_stmt
2055 		   && id->call_stmt
2056 		   && (decl = gimple_call_fndecl (stmt))
2057 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2058 		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
2059 	    {
2060 	      /* __builtin_va_arg_pack_len () should be replaced by
2061 		 the number of anonymous arguments.  */
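	      /* Illustrative example (hypothetical call): when inlining
		 into "callee (x, 5, 6, 7)" with a single named parameter,
		 __builtin_va_arg_pack_len () evaluates to 3; bound
		 pointer-bounds arguments, if any, are not counted.  */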
2062 	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
2063 	      tree count, p;
2064 	      gimple *new_stmt;
2065 
2066 	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2067 		nargs--;
2068 
2069 	      /* For instrumented calls we should ignore bounds.  */
2070 	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
2071 		   i < gimple_call_num_args (id->call_stmt);
2072 		   i++)
2073 		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
2074 		  nargs--;
2075 
2076 	      if (!gimple_call_lhs (stmt))
2077 		{
2078 		  /* Drop unused calls.  */
2079 		  gsi_remove (&copy_gsi, false);
2080 		  continue;
2081 		}
2082 	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2083 		{
2084 		  count = build_int_cst (integer_type_node, nargs);
2085 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2086 		  gsi_replace (&copy_gsi, new_stmt, false);
2087 		  stmt = new_stmt;
2088 		}
2089 	      else if (nargs != 0)
2090 		{
2091 		  tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2092 		  count = build_int_cst (integer_type_node, nargs);
2093 		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2094 						  PLUS_EXPR, newlhs, count);
2095 		  gimple_call_set_lhs (stmt, newlhs);
2096 		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2097 		}
2098 	    }
2099 	  else if (call_stmt
2100 		   && id->call_stmt
2101 		   && gimple_call_internal_p (stmt)
2102 		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2103 	    {
2104 	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
2105 	      gsi_remove (&copy_gsi, false);
2106 	      continue;
2107 	    }
2108 
2109 	  /* Statements produced by inlining can be unfolded, especially
2110 	     when we have constant propagated some operands.  We can't fold
2111 	     them right now for two reasons:
2112 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
2113 	     2) we can't change function calls to builtins.
2114 	     So we just mark the statement for later folding.  We mark
2115 	     all new statements, instead of just the statements that have
2116 	     changed by some nontrivial substitution, so that even statements
2117 	     made foldable indirectly are updated.  If this turns out to be
2118 	     expensive, copy_body can be told to watch for nontrivial
2119 	     changes.  */
2120 	  if (id->statements_to_fold)
2121 	    id->statements_to_fold->add (stmt);
2122 
2123 	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2124 	     callgraph edges and update or duplicate them.  */
2125 	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2126 	    {
2127 	      struct cgraph_edge *edge;
2128 
2129 	      switch (id->transform_call_graph_edges)
2130 		{
2131 		case CB_CGE_DUPLICATE:
2132 		  edge = id->src_node->get_edge (orig_stmt);
2133 		  if (edge)
2134 		    {
2135 		      struct cgraph_edge *old_edge = edge;
2136 		      profile_count old_cnt = edge->count;
2137 		      edge = edge->clone (id->dst_node, call_stmt,
2138 					  gimple_uid (stmt),
2139 					  num, den,
2140 					  true);
2141 
2142 		      /* Speculative calls consist of two edges - direct and
2143 			 indirect.  Duplicate the whole thing and distribute
2144 			 frequencies accordingly.  */
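		      /* Rough sketch of the split below, with illustrative
			 figures: if the original direct edge had count 90
			 and the indirect edge count 10, the copied block's
			 count is distributed 9:1 between the cloned direct
			 and indirect edges.  */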
2145 		      if (edge->speculative)
2146 			{
2147 			  struct cgraph_edge *direct, *indirect;
2148 			  struct ipa_ref *ref;
2149 
2150 			  gcc_assert (!edge->indirect_unknown_callee);
2151 			  old_edge->speculative_call_info (direct, indirect, ref);
2152 
2153 			  profile_count indir_cnt = indirect->count;
2154 			  indirect = indirect->clone (id->dst_node, call_stmt,
2155 						      gimple_uid (stmt),
2156 						      num, den,
2157 						      true);
2158 
2159 			  profile_probability prob
2160 			     = indir_cnt.probability_in (old_cnt + indir_cnt);
2161 			  indirect->count
2162 			     = copy_basic_block->count.apply_probability (prob);
2163 			  edge->count = copy_basic_block->count - indirect->count;
2164 			  id->dst_node->clone_reference (ref, stmt);
2165 			}
2166 		      else
2167 			edge->count = copy_basic_block->count;
2168 		    }
2169 		  break;
2170 
2171 		case CB_CGE_MOVE_CLONES:
2172 		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2173 								call_stmt);
2174 		  edge = id->dst_node->get_edge (stmt);
2175 		  break;
2176 
2177 		case CB_CGE_MOVE:
2178 		  edge = id->dst_node->get_edge (orig_stmt);
2179 		  if (edge)
2180 		    edge->set_call_stmt (call_stmt);
2181 		  break;
2182 
2183 		default:
2184 		  gcc_unreachable ();
2185 		}
2186 
2187 	      /* Constant propagation on arguments done during inlining
2188 		 may create a new direct call.  Produce an edge for it.  */
2189 	      if ((!edge
2190 		   || (edge->indirect_inlining_edge
2191 		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2192 		  && id->dst_node->definition
2193 		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2194 		{
2195 		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2196 
2197 		  /* We have a missing edge in the callgraph.  This can happen
2198 		     when previous inlining turned an indirect call into a
2199 		     direct call by constant propagating arguments or we are
2200 		     producing a dead clone (for further cloning).  In all
2201 		     other cases we hit a bug (incorrect node sharing is the
2202 		     most common reason for missing edges).  */
2203 		  gcc_assert (!dest->definition
2204 			      || dest->address_taken
2205 		  	      || !id->src_node->definition
2206 			      || !id->dst_node->definition);
2207 		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2208 		    id->dst_node->create_edge_including_clones
2209 		      (dest, orig_stmt, call_stmt, bb->count,
2210 		       CIF_ORIGINALLY_INDIRECT_CALL);
2211 		  else
2212 		    id->dst_node->create_edge (dest, call_stmt,
2213 					bb->count)->inline_failed
2214 		      = CIF_ORIGINALLY_INDIRECT_CALL;
2215 		  if (dump_file)
2216 		    {
2217 		      fprintf (dump_file, "Created new direct edge to %s\n",
2218 			       dest->name ());
2219 		    }
2220 		}
2221 
2222 	      notice_special_calls (as_a <gcall *> (stmt));
2223 	    }
2224 
2225 	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2226 				      id->eh_map, id->eh_lp_nr);
2227 
2228 	  gsi_next (&copy_gsi);
2229 	}
2230       while (!gsi_end_p (copy_gsi));
2231 
2232       copy_gsi = gsi_last_bb (copy_basic_block);
2233     }
2234 
2235   return copy_basic_block;
2236 }
2237 
2238 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2239    SSA form is quite easy, since the dominator relationship for the old
2240    basic blocks does not change.
2241 
2242    There is, however, an exception: inlining might change the dominator
2243    relation across EH edges that go from basic blocks within the inlined
2244    function to landing pads in the function we inline into.
2245 
2246    The function fills in PHI_RESULTs of such PHI nodes if they refer
2247    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2248    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2249    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2250    set, and this means that there will be no overlapping live ranges
2251    for the underlying symbol.
2252 
2253    This might change in the future if we allow redirecting of EH edges;
2254    we might then want to change the way the CFG is built pre-inlining to
2255    include all the possible edges.  */
2256 static void
2257 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2258 				  bool can_throw, bool nonlocal_goto)
2259 {
2260   edge e;
2261   edge_iterator ei;
2262 
2263   FOR_EACH_EDGE (e, ei, bb->succs)
2264     if (!e->dest->aux
2265 	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2266       {
2267 	gphi *phi;
2268 	gphi_iterator si;
2269 
2270 	if (!nonlocal_goto)
2271 	  gcc_assert (e->flags & EDGE_EH);
2272 
2273 	if (!can_throw)
2274 	  gcc_assert (!(e->flags & EDGE_EH));
2275 
2276 	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2277 	  {
2278 	    edge re;
2279 
2280 	    phi = si.phi ();
2281 
2282 	    /* For abnormal goto/call edges the receiver can be the
2283 	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2284 
2285 	    gcc_assert ((e->flags & EDGE_EH)
2286 			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2287 
2288 	    re = find_edge (ret_bb, e->dest);
2289 	    gcc_checking_assert (re);
2290 	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2291 			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2292 
2293 	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2294 		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2295 	  }
2296       }
2297 }
2298 
2299 
2300 /* Copy edges from BB into its copy constructed earlier, scale profile
2301    accordingly.  Edges will be taken care of later.  Assume aux
2302    pointers point to the copies of each BB.  Return true if any
2303    debug stmts are left after a statement that must end the basic block.  */
2304 
2305 static bool
2306 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2307 		   basic_block ret_bb, basic_block abnormal_goto_dest)
2308 {
2309   basic_block new_bb = (basic_block) bb->aux;
2310   edge_iterator ei;
2311   edge old_edge;
2312   gimple_stmt_iterator si;
2313   int flags;
2314   bool need_debug_cleanup = false;
2315 
2316   /* Use the indices from the original blocks to create edges for the
2317      new ones.  */
2318   FOR_EACH_EDGE (old_edge, ei, bb->succs)
2319     if (!(old_edge->flags & EDGE_EH))
2320       {
2321 	edge new_edge;
2322 
2323 	flags = old_edge->flags;
2324 
2325 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2326 	if (old_edge->dest->index == EXIT_BLOCK
2327 	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2328 	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2329 	  flags |= EDGE_FALLTHRU;
2330 	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2331 	new_edge->probability = old_edge->probability;
2332       }
2333 
2334   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2335     return false;
2336 
2337   /* When doing function splitting, we must decrease the count of the return
2338      block which was previously reachable by blocks we did not copy.  */
2339   if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2340     FOR_EACH_EDGE (old_edge, ei, bb->preds)
2341       if (old_edge->src->index != ENTRY_BLOCK
2342 	  && !old_edge->src->aux)
2343 	new_bb->count -= old_edge->count ().apply_scale (num, den);
2344 
2345   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2346     {
2347       gimple *copy_stmt;
2348       bool can_throw, nonlocal_goto;
2349 
2350       copy_stmt = gsi_stmt (si);
2351       if (!is_gimple_debug (copy_stmt))
2352 	update_stmt (copy_stmt);
2353 
2354       /* Do this before the possible split_block.  */
2355       gsi_next (&si);
2356 
2357       /* If this tree could throw an exception, there are two
2358          cases where we need to add abnormal edge(s): the
2359          tree wasn't in a region and there is a "current
2360          region" in the caller; or the original tree had
2361          EH edges.  In both cases split the block after the tree,
2362          and add abnormal edge(s) as needed; we need both
2363          those from the callee and the caller.
2364          We check whether the copy can throw, because the const
2365          propagation can change an INDIRECT_REF which throws
2366          into a COMPONENT_REF which doesn't.  If the copy
2367          can throw, the original could also throw.  */
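      /* Roughly: a copied statement that may throw (or make an abnormal
	 goto) has to end its basic block, so any statements copied after
	 it are split off into a fresh block reached by a fallthru edge,
	 and the EH/abnormal edges are then added from the block that ends
	 with that statement.  */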
2368       can_throw = stmt_can_throw_internal (copy_stmt);
2369       nonlocal_goto
2370 	= (stmt_can_make_abnormal_goto (copy_stmt)
2371 	   && !computed_goto_p (copy_stmt));
2372 
2373       if (can_throw || nonlocal_goto)
2374 	{
2375 	  if (!gsi_end_p (si))
2376 	    {
2377 	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2378 		gsi_next (&si);
2379 	      if (gsi_end_p (si))
2380 		need_debug_cleanup = true;
2381 	    }
2382 	  if (!gsi_end_p (si))
2383 	    /* Note that bb's predecessor edges aren't necessarily
2384 	       right at this point; split_block doesn't care.  */
2385 	    {
2386 	      edge e = split_block (new_bb, copy_stmt);
2387 
2388 	      new_bb = e->dest;
2389 	      new_bb->aux = e->src->aux;
2390 	      si = gsi_start_bb (new_bb);
2391 	    }
2392 	}
2393 
2394       bool update_probs = false;
2395 
2396       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2397 	{
2398 	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2399 	  update_probs = true;
2400 	}
2401       else if (can_throw)
2402 	{
2403 	  make_eh_edges (copy_stmt);
2404 	  update_probs = true;
2405 	}
2406 
2407       /* EH edges may not match old edges.  Copy as much as possible.  */
2408       if (update_probs)
2409 	{
2410           edge e;
2411           edge_iterator ei;
2412 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2413 
2414           FOR_EACH_EDGE (old_edge, ei, bb->succs)
2415             if ((old_edge->flags & EDGE_EH)
2416 		&& (e = find_edge (copy_stmt_bb,
2417 				   (basic_block) old_edge->dest->aux))
2418 		&& (e->flags & EDGE_EH))
2419 	      e->probability = old_edge->probability;
2420 
2421           FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2422 	    if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
2423 	      e->probability = profile_probability::never ();
2424         }
2425 
2426 
2427       /* If the call we inline cannot make an abnormal goto, do not add
2428          additional abnormal edges but only retain those already present
2429 	 in the original function body.  */
2430       if (abnormal_goto_dest == NULL)
2431 	nonlocal_goto = false;
2432       if (nonlocal_goto)
2433 	{
2434 	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2435 
2436 	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2437 	    nonlocal_goto = false;
2438 	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2439 	     in OpenMP regions which aren't allowed to be left abnormally.
2440 	     So, no need to add abnormal edge in that case.  */
2441 	  else if (is_gimple_call (copy_stmt)
2442 		   && gimple_call_internal_p (copy_stmt)
2443 		   && (gimple_call_internal_fn (copy_stmt)
2444 		       == IFN_ABNORMAL_DISPATCHER)
2445 		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2446 	    nonlocal_goto = false;
2447 	  else
2448 	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2449 				   EDGE_ABNORMAL);
2450 	}
2451 
2452       if ((can_throw || nonlocal_goto)
2453 	  && gimple_in_ssa_p (cfun))
2454 	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2455 					  can_throw, nonlocal_goto);
2456     }
2457   return need_debug_cleanup;
2458 }
2459 
2460 /* Copy the PHIs.  All blocks and edges are copied, some blocks
2461    were possibly split and new outgoing EH edges inserted.
2462    BB points to the block of the original function and AUX pointers link
2463    the original and newly copied blocks.  */
2464 
2465 static void
2466 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2467 {
2468   basic_block const new_bb = (basic_block) bb->aux;
2469   edge_iterator ei;
2470   gphi *phi;
2471   gphi_iterator si;
2472   edge new_edge;
2473   bool inserted = false;
2474 
2475   for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2476     {
2477       tree res, new_res;
2478       gphi *new_phi;
2479 
2480       phi = si.phi ();
2481       res = PHI_RESULT (phi);
2482       new_res = res;
2483       if (!virtual_operand_p (res))
2484 	{
2485 	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2486 	  if (EDGE_COUNT (new_bb->preds) == 0)
2487 	    {
2488 	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2489 	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2490 	    }
2491 	  else
2492 	    {
2493 	      new_phi = create_phi_node (new_res, new_bb);
2494 	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2495 		{
2496 		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2497 					     bb);
2498 		  tree arg;
2499 		  tree new_arg;
2500 		  edge_iterator ei2;
2501 		  location_t locus;
2502 
2503 		  /* When doing partial cloning, we allow PHIs on the entry
2504 		     block as long as all the arguments are the same.
2505 		     Find any input edge to see which argument to copy.  */
2506 		  if (!old_edge)
2507 		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2508 		      if (!old_edge->src->aux)
2509 			break;
2510 
2511 		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2512 		  new_arg = arg;
2513 		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2514 		  gcc_assert (new_arg);
2515 		  /* With return slot optimization we can end up with
2516 		     non-gimple (foo *)&this->m, fix that here.  */
2517 		  if (TREE_CODE (new_arg) != SSA_NAME
2518 		      && TREE_CODE (new_arg) != FUNCTION_DECL
2519 		      && !is_gimple_val (new_arg))
2520 		    {
2521 		      gimple_seq stmts = NULL;
2522 		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2523 						      NULL);
2524 		      gsi_insert_seq_on_edge (new_edge, stmts);
2525 		      inserted = true;
2526 		    }
2527 		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2528 		  if (LOCATION_BLOCK (locus))
2529 		    {
2530 		      tree *n;
2531 		      n = id->decl_map->get (LOCATION_BLOCK (locus));
2532 		      gcc_assert (n);
2533 		      locus = set_block (locus, *n);
2534 		    }
2535 		  else
2536 		    locus = LOCATION_LOCUS (locus);
2537 
2538 		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2539 		}
2540 	    }
2541 	}
2542     }
2543 
2544   /* Commit the delayed edge insertions.  */
2545   if (inserted)
2546     FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2547       gsi_commit_one_edge_insert (new_edge, NULL);
2548 }
2549 
2550 
2551 /* Wrapper for remap_decl so it can be used as a callback.  */
2552 
2553 static tree
2554 remap_decl_1 (tree decl, void *data)
2555 {
2556   return remap_decl (decl, (copy_body_data *) data);
2557 }
2558 
2559 /* Build struct function and associated data structures for the new clone
2560    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
2561    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2562 
2563 static void
2564 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2565 {
2566   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2567 
2568   if (!DECL_ARGUMENTS (new_fndecl))
2569     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2570   if (!DECL_RESULT (new_fndecl))
2571     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2572 
2573   /* Register specific tree functions.  */
2574   gimple_register_cfg_hooks ();
2575 
2576   /* Get clean struct function.  */
2577   push_struct_function (new_fndecl);
2578 
2579   /* We will rebuild these, so just sanity check that they are empty.  */
2580   gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2581   gcc_assert (cfun->local_decls == NULL);
2582   gcc_assert (cfun->cfg == NULL);
2583   gcc_assert (cfun->decl == new_fndecl);
2584 
2585   /* Copy items we preserve during cloning.  */
2586   cfun->static_chain_decl = src_cfun->static_chain_decl;
2587   cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2588   cfun->function_end_locus = src_cfun->function_end_locus;
2589   cfun->curr_properties = src_cfun->curr_properties;
2590   cfun->last_verified = src_cfun->last_verified;
2591   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2592   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2593   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2594   cfun->stdarg = src_cfun->stdarg;
2595   cfun->after_inlining = src_cfun->after_inlining;
2596   cfun->can_throw_non_call_exceptions
2597     = src_cfun->can_throw_non_call_exceptions;
2598   cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2599   cfun->returns_struct = src_cfun->returns_struct;
2600   cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2601 
2602   init_empty_tree_cfg ();
2603 
2604   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2605 
2606   profile_count num = count;
2607   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2608   profile_count::adjust_for_ipa_scaling (&num, &den);
2609 
2610   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2611     ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2612 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2613   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2614     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2615 				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2616   if (src_cfun->eh)
2617     init_eh_for_function ();
2618 
2619   if (src_cfun->gimple_df)
2620     {
2621       init_tree_ssa (cfun);
2622       cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2623       if (cfun->gimple_df->in_ssa_p)
2624 	init_ssa_operands (cfun);
2625     }
2626 }
2627 
2628 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2629    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2630    successor has multiple predecessors, reset the debug stmt values;
2631    otherwise keep them.  */
2632 
2633 static void
2634 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2635 {
2636   edge e;
2637   edge_iterator ei;
2638   gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2639 
2640   if (gsi_end_p (si)
2641       || gsi_one_before_end_p (si)
2642       || !(stmt_can_throw_internal (gsi_stmt (si))
2643 	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2644     return;
2645 
2646   FOR_EACH_EDGE (e, ei, new_bb->succs)
2647     {
2648       gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2649       gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2650       while (is_gimple_debug (gsi_stmt (ssi)))
2651 	{
2652 	  gimple *stmt = gsi_stmt (ssi);
2653 	  gdebug *new_stmt;
2654 	  tree var;
2655 	  tree value;
2656 
2657 	  /* For the last edge move the debug stmts instead of copying
2658 	     them.  */
2659 	  if (ei_one_before_end_p (ei))
2660 	    {
2661 	      si = ssi;
2662 	      gsi_prev (&ssi);
2663 	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2664 		gimple_debug_bind_reset_value (stmt);
2665 	      gsi_remove (&si, false);
2666 	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2667 	      continue;
2668 	    }
2669 
2670 	  if (gimple_debug_bind_p (stmt))
2671 	    {
2672 	      var = gimple_debug_bind_get_var (stmt);
2673 	      if (single_pred_p (e->dest))
2674 		{
2675 		  value = gimple_debug_bind_get_value (stmt);
2676 		  value = unshare_expr (value);
2677 		}
2678 	      else
2679 		value = NULL_TREE;
2680 	      new_stmt = gimple_build_debug_bind (var, value, stmt);
2681 	    }
2682 	  else if (gimple_debug_source_bind_p (stmt))
2683 	    {
2684 	      var = gimple_debug_source_bind_get_var (stmt);
2685 	      value = gimple_debug_source_bind_get_value (stmt);
2686 	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2687 	    }
2688 	  else if (gimple_debug_nonbind_marker_p (stmt))
2689 	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2690 	  else
2691 	    gcc_unreachable ();
2692 	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2693 	  id->debug_stmts.safe_push (new_stmt);
2694 	  gsi_prev (&ssi);
2695 	}
2696     }
2697 }
2698 
2699 /* Make a copy of the sub-loops of SRC_PARENT and place them
2700    as sub-loops of DEST_PARENT.  */
2701 
2702 static void
2703 copy_loops (copy_body_data *id,
2704 	    struct loop *dest_parent, struct loop *src_parent)
2705 {
2706   struct loop *src_loop = src_parent->inner;
2707   while (src_loop)
2708     {
2709       if (!id->blocks_to_copy
2710 	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2711 	{
2712 	  struct loop *dest_loop = alloc_loop ();
2713 
2714 	  /* Assign the new loop its header and latch and associate
2715 	     those with the new loop.  */
2716 	  dest_loop->header = (basic_block)src_loop->header->aux;
2717 	  dest_loop->header->loop_father = dest_loop;
2718 	  if (src_loop->latch != NULL)
2719 	    {
2720 	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2721 	      dest_loop->latch->loop_father = dest_loop;
2722 	    }
2723 
2724 	  /* Copy loop meta-data.  */
2725 	  copy_loop_info (src_loop, dest_loop);
2726 
2727 	  /* Finally place it into the loop array and the loop tree.  */
2728 	  place_new_loop (cfun, dest_loop);
2729 	  flow_loop_tree_node_add (dest_parent, dest_loop);
2730 
2731 	  dest_loop->safelen = src_loop->safelen;
2732 	  if (src_loop->unroll)
2733 	    {
2734 	      dest_loop->unroll = src_loop->unroll;
2735 	      cfun->has_unroll = true;
2736 	    }
2737 	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
2738 	  if (src_loop->force_vectorize)
2739 	    {
2740 	      dest_loop->force_vectorize = true;
2741 	      cfun->has_force_vectorize_loops = true;
2742 	    }
2743 	  if (src_loop->simduid)
2744 	    {
2745 	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2746 	      cfun->has_simduid_loops = true;
2747 	    }
2748 	  if (id->src_cfun->last_clique != 0)
2749 	    dest_loop->owned_clique
2750 	      = remap_dependence_clique (id,
2751 					 src_loop->owned_clique
2752 					 ? src_loop->owned_clique : 1);
2753 	  /* Recurse.  */
2754 	  copy_loops (id, dest_loop, src_loop);
2755 	}
2756       src_loop = src_loop->next;
2757     }
2758 }
2759 
2760 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB.  */
2761 
2762 void
2763 redirect_all_calls (copy_body_data * id, basic_block bb)
2764 {
2765   gimple_stmt_iterator si;
2766   gimple *last = last_stmt (bb);
2767   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2768     {
2769       gimple *stmt = gsi_stmt (si);
2770       if (is_gimple_call (stmt))
2771 	{
2772 	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2773 	  if (edge)
2774 	    {
2775 	      edge->redirect_call_stmt_to_callee ();
2776 	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2777 		gimple_purge_dead_eh_edges (bb);
2778 	    }
2779 	}
2780     }
2781 }
2782 
2783 /* Make a copy of the body of FN so that it can be inserted inline in
2784    another function.  Walks FN via CFG, returns new fndecl.  */
2785 
2786 static tree
2787 copy_cfg_body (copy_body_data * id,
2788 	       basic_block entry_block_map, basic_block exit_block_map,
2789 	       basic_block new_entry)
2790 {
2791   tree callee_fndecl = id->src_fn;
2792   /* Original cfun for the callee, doesn't change.  */
2793   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2794   struct function *cfun_to_copy;
2795   basic_block bb;
2796   tree new_fndecl = NULL;
2797   bool need_debug_cleanup = false;
2798   int last;
2799   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2800   profile_count num = entry_block_map->count;
2801 
2802   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2803 
2804   /* Register specific tree functions.  */
2805   gimple_register_cfg_hooks ();
2806 
2807   /* If we are inlining just a region of the function, make sure to connect
2808      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2809      be part of a loop, we must compute the frequency and probability of
2810      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2811      probabilities of the edges incoming from the nonduplicated region.  */
2812   if (new_entry)
2813     {
2814       edge e;
2815       edge_iterator ei;
2816       den = profile_count::zero ();
2817 
2818       FOR_EACH_EDGE (e, ei, new_entry->preds)
2819 	if (!e->src->aux)
2820 	  den += e->count ();
2821       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2822     }
2823 
2824   profile_count::adjust_for_ipa_scaling (&num, &den);
2825 
2826   /* Must have a CFG here at this point.  */
2827   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2828 	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2829 
2830 
2831   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2832   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2833   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2834   exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2835 
2836   /* Duplicate any exception-handling regions.  */
2837   if (cfun->eh)
2838     id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2839 				       remap_decl_1, id);
2840 
2841   /* Use aux pointers to map the original blocks to their copies.  */
2842   FOR_EACH_BB_FN (bb, cfun_to_copy)
2843     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2844       {
2845 	basic_block new_bb = copy_bb (id, bb, num, den);
2846 	bb->aux = new_bb;
2847 	new_bb->aux = bb;
2848 	new_bb->loop_father = entry_block_map->loop_father;
2849       }
2850 
2851   last = last_basic_block_for_fn (cfun);
2852 
2853   /* Now that we've duplicated the blocks, duplicate their edges.  */
2854   basic_block abnormal_goto_dest = NULL;
2855   if (id->call_stmt
2856       && stmt_can_make_abnormal_goto (id->call_stmt))
2857     {
2858       gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2859 
2860       bb = gimple_bb (id->call_stmt);
2861       gsi_next (&gsi);
2862       if (gsi_end_p (gsi))
2863 	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2864     }
2865   FOR_ALL_BB_FN (bb, cfun_to_copy)
2866     if (!id->blocks_to_copy
2867 	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2868       need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2869 					       abnormal_goto_dest);
2870 
2871   if (new_entry)
2872     {
2873       edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2874 			  EDGE_FALLTHRU);
2875       e->probability = profile_probability::always ();
2876     }
2877 
2878   /* Duplicate the loop tree, if available and wanted.  */
2879   if (loops_for_fn (src_cfun) != NULL
2880       && current_loops != NULL)
2881     {
2882       copy_loops (id, entry_block_map->loop_father,
2883 		  get_loop (src_cfun, 0));
2884       /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
2885       loops_state_set (LOOPS_NEED_FIXUP);
2886     }
2887 
2888   /* If the loop tree in the source function needed fixup, mark the
2889      destination loop tree for fixup, too.  */
2890   if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2891     loops_state_set (LOOPS_NEED_FIXUP);
2892 
2893   if (gimple_in_ssa_p (cfun))
2894     FOR_ALL_BB_FN (bb, cfun_to_copy)
2895       if (!id->blocks_to_copy
2896 	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2897 	copy_phis_for_bb (bb, id);
2898 
2899   FOR_ALL_BB_FN (bb, cfun_to_copy)
2900     if (bb->aux)
2901       {
2902 	if (need_debug_cleanup
2903 	    && bb->index != ENTRY_BLOCK
2904 	    && bb->index != EXIT_BLOCK)
2905 	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2906 	/* Update call edge destinations.  This cannot be done before loop
2907 	   info is updated, because we may split basic blocks.  */
2908 	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2909 	    && bb->index != ENTRY_BLOCK
2910 	    && bb->index != EXIT_BLOCK)
2911 	  redirect_all_calls (id, (basic_block)bb->aux);
2912 	((basic_block)bb->aux)->aux = NULL;
2913 	bb->aux = NULL;
2914       }
2915 
2916   /* Zero out AUX fields of the blocks newly created during EH edge
2917      insertion.  */
2918   for (; last < last_basic_block_for_fn (cfun); last++)
2919     {
2920       if (need_debug_cleanup)
2921 	maybe_move_debug_stmts_to_successors (id,
2922 					      BASIC_BLOCK_FOR_FN (cfun, last));
2923       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2924       /* Update call edge destinations.  This cannot be done before loop
2925 	 info is updated, because we may split basic blocks.  */
2926       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2927 	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2928     }
2929   entry_block_map->aux = NULL;
2930   exit_block_map->aux = NULL;
2931 
2932   if (id->eh_map)
2933     {
2934       delete id->eh_map;
2935       id->eh_map = NULL;
2936     }
2937   if (id->dependence_map)
2938     {
2939       delete id->dependence_map;
2940       id->dependence_map = NULL;
2941     }
2942 
2943   return new_fndecl;
2944 }
2945 
2946 /* Copy the debug STMT using ID.  We deal with these statements in a
2947    special way: if any variable in their VALUE expression wasn't
2948    remapped yet, we won't remap it, because that would get decl uids
2949    out of sync, causing codegen differences between -g and -g0.  If
2950    this arises, we drop the VALUE expression altogether.  */
2951 
2952 static void
2953 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2954 {
2955   tree t, *n;
2956   struct walk_stmt_info wi;
2957 
2958   if (gimple_block (stmt))
2959     {
2960       n = id->decl_map->get (gimple_block (stmt));
2961       gimple_set_block (stmt, n ? *n : id->block);
2962     }
2963 
2964   if (gimple_debug_nonbind_marker_p (stmt))
2965     return;
2966 
2967   /* Remap all the operands in STMT.  */
2968   memset (&wi, 0, sizeof (wi));
2969   wi.info = id;
2970 
2971   processing_debug_stmt = 1;
2972 
2973   if (gimple_debug_source_bind_p (stmt))
2974     t = gimple_debug_source_bind_get_var (stmt);
2975   else if (gimple_debug_bind_p (stmt))
2976     t = gimple_debug_bind_get_var (stmt);
2977   else
2978     gcc_unreachable ();
2979 
2980   if (TREE_CODE (t) == PARM_DECL && id->debug_map
2981       && (n = id->debug_map->get (t)))
2982     {
2983       gcc_assert (VAR_P (*n));
2984       t = *n;
2985     }
2986   else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2987     /* T is a non-localized variable.  */;
2988   else
2989     walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2990 
2991   if (gimple_debug_bind_p (stmt))
2992     {
2993       gimple_debug_bind_set_var (stmt, t);
2994 
2995       if (gimple_debug_bind_has_value_p (stmt))
2996 	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2997 		   remap_gimple_op_r, &wi, NULL);
2998 
2999       /* Punt if any decl couldn't be remapped.  */
3000       if (processing_debug_stmt < 0)
3001 	gimple_debug_bind_reset_value (stmt);
3002     }
3003   else if (gimple_debug_source_bind_p (stmt))
3004     {
3005       gimple_debug_source_bind_set_var (stmt, t);
3006       /* When inlining and the source bind refers to one of the optimized
3007 	 away parameters, change the source bind into a normal debug bind
3008 	 referring to the corresponding DEBUG_EXPR_DECL that should have
3009 	 been bound before the call stmt.  */
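      /* Illustrative example: a source bind "# DEBUG x s=> parm", where
	 PARM was optimized away, becomes a plain bind "# DEBUG x => D#N",
	 with D#N being the DEBUG_EXPR_DECL recorded for the corresponding
	 argument at the call site.  */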
3010       t = gimple_debug_source_bind_get_value (stmt);
3011       if (t != NULL_TREE
3012 	  && TREE_CODE (t) == PARM_DECL
3013 	  && id->call_stmt)
3014 	{
3015 	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3016 	  unsigned int i;
3017 	  if (debug_args != NULL)
3018 	    {
3019 	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3020 		if ((**debug_args)[i] == DECL_ORIGIN (t)
3021 		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3022 		  {
3023 		    t = (**debug_args)[i + 1];
3024 		    stmt->subcode = GIMPLE_DEBUG_BIND;
3025 		    gimple_debug_bind_set_value (stmt, t);
3026 		    break;
3027 		  }
3028 	    }
3029 	}
3030       if (gimple_debug_source_bind_p (stmt))
3031 	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3032 		   remap_gimple_op_r, &wi, NULL);
3033     }
3034 
3035   processing_debug_stmt = 0;
3036 
3037   update_stmt (stmt);
3038 }
3039 
3040 /* Process deferred debug stmts.  In order to give values better odds
3041    of being successfully remapped, we delay the processing of debug
3042    stmts until all other stmts that might require remapping are
3043    processed.  */
3044 
3045 static void
3046 copy_debug_stmts (copy_body_data *id)
3047 {
3048   size_t i;
3049   gdebug *stmt;
3050 
3051   if (!id->debug_stmts.exists ())
3052     return;
3053 
3054   FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3055     copy_debug_stmt (stmt, id);
3056 
3057   id->debug_stmts.release ();
3058 }
3059 
3060 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3061    another function.  */
3062 
3063 static tree
3064 copy_tree_body (copy_body_data *id)
3065 {
3066   tree fndecl = id->src_fn;
3067   tree body = DECL_SAVED_TREE (fndecl);
3068 
3069   walk_tree (&body, copy_tree_body_r, id, NULL);
3070 
3071   return body;
3072 }
3073 
3074 /* Make a copy of the body of FN so that it can be inserted inline in
3075    another function.  */
3076 
3077 static tree
3078 copy_body (copy_body_data *id,
3079 	   basic_block entry_block_map, basic_block exit_block_map,
3080 	   basic_block new_entry)
3081 {
3082   tree fndecl = id->src_fn;
3083   tree body;
3084 
3085   /* If this body has a CFG, walk CFG and copy.  */
3086   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3087   body = copy_cfg_body (id, entry_block_map, exit_block_map,
3088 			new_entry);
3089   copy_debug_stmts (id);
3090 
3091   return body;
3092 }
3093 
3094 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3095    defined in function FN, or of a data member thereof.  */
3096 
3097 static bool
3098 self_inlining_addr_expr (tree value, tree fn)
3099 {
3100   tree var;
3101 
3102   if (TREE_CODE (value) != ADDR_EXPR)
3103     return false;
3104 
3105   var = get_base_address (TREE_OPERAND (value, 0));
3106 
3107   return var && auto_var_in_fn_p (var, fn);
3108 }
3109 
3110 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3111    lexical block and line number information from base_stmt, if given,
3112    or from the last stmt of the block otherwise.  */
3113 
3114 static gimple *
3115 insert_init_debug_bind (copy_body_data *id,
3116 			basic_block bb, tree var, tree value,
3117 			gimple *base_stmt)
3118 {
3119   gimple *note;
3120   gimple_stmt_iterator gsi;
3121   tree tracked_var;
3122 
3123   if (!gimple_in_ssa_p (id->src_cfun))
3124     return NULL;
3125 
3126   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3127     return NULL;
3128 
3129   tracked_var = target_for_debug_bind (var);
3130   if (!tracked_var)
3131     return NULL;
3132 
3133   if (bb)
3134     {
3135       gsi = gsi_last_bb (bb);
3136       if (!base_stmt && !gsi_end_p (gsi))
3137 	base_stmt = gsi_stmt (gsi);
3138     }
3139 
3140   note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3141 
3142   if (bb)
3143     {
3144       if (!gsi_end_p (gsi))
3145 	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3146       else
3147 	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3148     }
3149 
3150   return note;
3151 }
3152 
3153 static void
3154 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3155 {
3156   /* If VAR represents a zero-sized variable, it's possible that the
3157      assignment statement may result in no gimple statements.  */
3158   if (init_stmt)
3159     {
3160       gimple_stmt_iterator si = gsi_last_bb (bb);
3161 
3162       /* We can end up with init statements that store to a non-register
3163          from a rhs with a conversion.  Handle that here by forcing the
3164 	 rhs into a temporary.  gimple_regimplify_operands is not
3165 	 prepared to do this for us.  */
3166       if (!is_gimple_debug (init_stmt)
3167 	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3168 	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3169 	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3170 	{
3171 	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3172 			     gimple_expr_type (init_stmt),
3173 			     gimple_assign_rhs1 (init_stmt));
3174 	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3175 					  GSI_NEW_STMT);
3176 	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3177 	  gimple_assign_set_rhs1 (init_stmt, rhs);
3178 	}
3179       gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3180       gimple_regimplify_operands (init_stmt, &si);
3181 
3182       if (!is_gimple_debug (init_stmt))
3183 	{
3184 	  tree def = gimple_assign_lhs (init_stmt);
3185 	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3186 	}
3187     }
3188 }
3189 
3190 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3191    at the end of BB.  When BB is NULL, we return the init statement to be
3192    output later.  */
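/* As a rough example (an informal sketch, not authoritative): inlining a
   call "foo (x + 1)" where foo is declared "int foo (int p)" typically
   materializes a local copy of the parameter, conceptually

     int p.N;          <-- VAR_DECL created by copy_decl_to_var
     p.N = x + 1;      <-- init statement appended to BB

   while in SSA form the default definition of "p" may instead be remapped
   directly to the argument's SSA name or constant, avoiding the copy.  */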
3193 static gimple *
3194 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3195 		     basic_block bb, tree *vars)
3196 {
3197   gimple *init_stmt = NULL;
3198   tree var;
3199   tree rhs = value;
3200   tree def = (gimple_in_ssa_p (cfun)
3201 	      ? ssa_default_def (id->src_cfun, p) : NULL);
3202 
3203   if (value
3204       && value != error_mark_node
3205       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3206     {
3207       /* If we can match up types by promotion/demotion do so.  */
3208       if (fold_convertible_p (TREE_TYPE (p), value))
3209 	rhs = fold_convert (TREE_TYPE (p), value);
3210       else
3211 	{
3212 	  /* ???  For valid programs we should not end up here.
3213 	     Still if we end up with truly mismatched types here, fall back
3214 	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3215 	     GIMPLE to the following passes.  */
3216 	  if (!is_gimple_reg_type (TREE_TYPE (value))
3217 	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3218 	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3219 	  else
3220 	    rhs = build_zero_cst (TREE_TYPE (p));
3221 	}
3222     }
3223 
3224   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3225      here since the type of this decl must be visible to the calling
3226      function.  */
3227   var = copy_decl_to_var (p, id);
3228 
3229   /* Declare this new variable.  */
3230   DECL_CHAIN (var) = *vars;
3231   *vars = var;
3232 
3233   /* Make gimplifier happy about this variable.  */
3234   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3235 
3236   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3237      we would not need to create a new variable here at all, if it
3238      weren't for debug info.  Still, we can just use the argument
3239      value.  */
3240   if (TREE_READONLY (p)
3241       && !TREE_ADDRESSABLE (p)
3242       && value && !TREE_SIDE_EFFECTS (value)
3243       && !def)
3244     {
3245       /* We may produce non-gimple trees by adding NOPs or introduce
3246 	 invalid sharing when the operand is not really constant.
3247 	 It is not a big deal to prohibit constant propagation here as
3248 	 we will constant propagate in the DOM1 pass anyway.  */
3249       if (is_gimple_min_invariant (value)
3250 	  && useless_type_conversion_p (TREE_TYPE (p),
3251 						 TREE_TYPE (value))
3252 	  /* We have to be very careful about ADDR_EXPR.  Make sure
3253 	     the base variable isn't a local variable of the inlined
3254 	     function, e.g., when doing recursive inlining, direct or
3255 	     mutually-recursive or whatever, which is why we don't
3256 	     just test whether fn == current_function_decl.  */
3257 	  && ! self_inlining_addr_expr (value, fn))
3258 	{
3259 	  insert_decl_map (id, p, value);
3260 	  insert_debug_decl_map (id, p, var);
3261 	  return insert_init_debug_bind (id, bb, var, value, NULL);
3262 	}
3263     }
3264 
3265   /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3266      that way, when the PARM_DECL is encountered, it will be
3267      automatically replaced by the VAR_DECL.  */
3268   insert_decl_map (id, p, var);
3269 
3270   /* Even if P was TREE_READONLY, the new VAR should not be.
3271      In the original code, we would have constructed a
3272      temporary, and then the function body would have never
3273      changed the value of P.  However, now, we will be
3274      constructing VAR directly.  The constructor body may
3275      change its value multiple times as it is being
3276      constructed.  Therefore, it must not be TREE_READONLY;
3277      the back-end assumes that TREE_READONLY variable is
3278      assigned to only once.  */
3279   if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3280     TREE_READONLY (var) = 0;
3281 
3282   /* If there is no setup required and we are in SSA, take the easy route
3283      replacing all SSA names representing the function parameter by the
3284      SSA name passed to function.
3285 
3286      We need to construct a map for the variable anyway as it might be used
3287      in different SSA names when the parameter is set in the function.
3288 
3289      Do the replacement at -O0 for const arguments replaced by a constant.
3290      This is important for builtin_constant_p and other constructs requiring
3291      a constant argument to be visible in the inlined function body.  */
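  /* E.g. (an informal sketch): with "static inline int f (const int c)
     { return __builtin_constant_p (c); }" called as "f (5)", mapping the
     default definition of "c" straight to the constant 5 lets the builtin
     fold to 1 inside the inlined body even at -O0.  */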
3292   if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3293       && (optimize
3294           || (TREE_READONLY (p)
3295 	      && is_gimple_min_invariant (rhs)))
3296       && (TREE_CODE (rhs) == SSA_NAME
3297 	  || is_gimple_min_invariant (rhs))
3298       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3299     {
3300       insert_decl_map (id, def, rhs);
3301       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3302     }
3303 
3304   /* If the value of the argument is never used, we don't care about
3305      initializing it.  */
3306   if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3307     {
3308       gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3309       return insert_init_debug_bind (id, bb, var, rhs, NULL);
3310     }
3311 
3312   /* Initialize this VAR_DECL from the equivalent argument.  Convert
3313      the argument to the proper type in case it was promoted.  */
3314   if (value)
3315     {
3316       if (rhs == error_mark_node)
3317 	{
3318 	  insert_decl_map (id, p, var);
3319 	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3320 	}
3321 
3322       STRIP_USELESS_TYPE_CONVERSION (rhs);
3323 
3324       /* If we are in SSA form, properly remap the default definition,
3325          or assign to a dummy SSA name if the parameter is unused and
3326 	 we are not optimizing.  */
3327       if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3328 	{
3329 	  if (def)
3330 	    {
3331 	      def = remap_ssa_name (def, id);
3332 	      init_stmt = gimple_build_assign (def, rhs);
3333 	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3334 	      set_ssa_default_def (cfun, var, NULL);
3335 	    }
3336 	  else if (!optimize)
3337 	    {
3338 	      def = make_ssa_name (var);
3339 	      init_stmt = gimple_build_assign (def, rhs);
3340 	    }
3341 	}
3342       else
3343         init_stmt = gimple_build_assign (var, rhs);
3344 
3345       if (bb && init_stmt)
3346         insert_init_stmt (id, bb, init_stmt);
3347     }
3348   return init_stmt;
3349 }
3350 
3351 /* Generate code to initialize the parameters of the function at the
3352    top of the stack in ID from the GIMPLE_CALL STMT.  */
3353 
3354 static void
3355 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3356 			       tree fn, basic_block bb)
3357 {
3358   tree parms;
3359   size_t i;
3360   tree p;
3361   tree vars = NULL_TREE;
3362   tree static_chain = gimple_call_chain (stmt);
3363 
3364   /* Figure out what the parameters are.  */
3365   parms = DECL_ARGUMENTS (fn);
3366 
3367   /* Loop through the parameter declarations, replacing each with an
3368      equivalent VAR_DECL, appropriately initialized.  */
3369   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3370     {
3371       tree val;
3372       val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3373       setup_one_parameter (id, p, val, fn, bb, &vars);
3374     }
3375   /* After remapping parameters remap their types.  This has to be done
3376      in a second loop over all parameters to appropriately remap
3377      variable sized arrays when the size is specified in a
3378      parameter following the array.  */
3379   for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3380     {
3381       tree *varp = id->decl_map->get (p);
3382       if (varp && VAR_P (*varp))
3383 	{
3384 	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3385 		      ? ssa_default_def (id->src_cfun, p) : NULL);
3386 	  tree var = *varp;
3387 	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3388 	  /* Also remap the default definition if it was remapped
3389 	     to the default definition of the parameter replacement
3390 	     by the parameter setup.  */
3391 	  if (def)
3392 	    {
3393 	      tree *defp = id->decl_map->get (def);
3394 	      if (defp
3395 		  && TREE_CODE (*defp) == SSA_NAME
3396 		  && SSA_NAME_VAR (*defp) == var)
3397 		TREE_TYPE (*defp) = TREE_TYPE (var);
3398 	    }
3399 	}
3400     }
3401 
3402   /* Initialize the static chain.  */
3403   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3404   gcc_assert (fn != current_function_decl);
3405   if (p)
3406     {
3407       /* No static chain?  Seems like a bug in tree-nested.c.  */
3408       gcc_assert (static_chain);
3409 
3410       setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3411     }
3412 
3413   declare_inline_vars (id->block, vars);
3414 }
3415 
3416 
3417 /* Declare a return variable to replace the RESULT_DECL for the
3418    function we are calling.  An appropriate DECL_STMT is returned.
3419    The USE_STMT is filled to contain a use of the declaration to
3420    indicate the return value of the function.
3421 
3422    RETURN_SLOT, if non-null, is the place where the result is stored.  It
3423    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3424    was the LHS of the MODIFY_EXPR to which this call is the RHS.
3425 
3426    RETURN_BOUNDS holds a destination for returned bounds.
3427 
3428    The return value is a (possibly null) value that holds the result
3429    as seen by the caller.  */
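/* Informally, for a call "y = bar ();" where bar's body ends in
   "return t;", inlining conceptually becomes

     retval.N = t;     <-- RESULT_DECL remapped to the new VAR_DECL
     y = retval.N;     <-- emitted by the caller from the returned use expr

   unless MODIFY_DEST ("y") itself can safely be reused as the return
   variable, in which case no temporary is created at all.  */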
3430 
3431 static tree
3432 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3433 			 tree return_bounds, basic_block entry_bb)
3434 {
3435   tree callee = id->src_fn;
3436   tree result = DECL_RESULT (callee);
3437   tree callee_type = TREE_TYPE (result);
3438   tree caller_type;
3439   tree var, use;
3440 
3441   /* Handle type-mismatches in the function declaration return type
3442      vs. the call expression.  */
3443   if (modify_dest)
3444     caller_type = TREE_TYPE (modify_dest);
3445   else
3446     caller_type = TREE_TYPE (TREE_TYPE (callee));
3447 
3448   /* We don't need to do anything for functions that don't return anything.  */
3449   if (VOID_TYPE_P (callee_type))
3450     return NULL_TREE;
3451 
3452   /* If there was a return slot, then the return value is the
3453      dereferenced address of that object.  */
3454   if (return_slot)
3455     {
3456       /* The front end shouldn't have used both return_slot and
3457 	 a modify expression.  */
3458       gcc_assert (!modify_dest);
3459       if (DECL_BY_REFERENCE (result))
3460 	{
3461 	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3462 	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3463 
3464 	  /* We are going to construct *&return_slot and we can't do that
3465 	     for variables believed to be not addressable.
3466 
3467 	     FIXME: This check can possibly match, because values returned
3468 	     via return slot optimization are not believed to have their
3469 	     address taken by alias analysis.  */
3470 	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3471 	  var = return_slot_addr;
3472 	}
3473       else
3474 	{
3475 	  var = return_slot;
3476 	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3477 	  if (TREE_ADDRESSABLE (result))
3478 	    mark_addressable (var);
3479 	}
3480       if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3481            || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3482 	  && !DECL_GIMPLE_REG_P (result)
3483 	  && DECL_P (var))
3484 	DECL_GIMPLE_REG_P (var) = 0;
3485       use = NULL;
3486       goto done;
3487     }
3488 
3489   /* All types requiring non-trivial constructors should have been handled.  */
3490   gcc_assert (!TREE_ADDRESSABLE (callee_type));
3491 
3492   /* Attempt to avoid creating a new temporary variable.  */
3493   if (modify_dest
3494       && TREE_CODE (modify_dest) != SSA_NAME)
3495     {
3496       bool use_it = false;
3497 
3498       /* We can't use MODIFY_DEST if there's type promotion involved.  */
3499       if (!useless_type_conversion_p (callee_type, caller_type))
3500 	use_it = false;
3501 
3502       /* ??? If we're assigning to a variable sized type, then we must
3503 	 reuse the destination variable, because we've no good way to
3504 	 create variable sized temporaries at this point.  */
3505       else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3506 	use_it = true;
3507 
3508       /* If the callee cannot possibly modify MODIFY_DEST, then we can
3509 	 reuse it as the result of the call directly.  Don't do this if
3510 	 it would promote MODIFY_DEST to addressable.  */
3511       else if (TREE_ADDRESSABLE (result))
3512 	use_it = false;
3513       else
3514 	{
3515 	  tree base_m = get_base_address (modify_dest);
3516 
3517 	  /* If the base isn't a decl, then it's a pointer, and we don't
3518 	     know where that's going to go.  */
3519 	  if (!DECL_P (base_m))
3520 	    use_it = false;
3521 	  else if (is_global_var (base_m))
3522 	    use_it = false;
3523 	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3524 		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3525 		   && !DECL_GIMPLE_REG_P (result)
3526 		   && DECL_GIMPLE_REG_P (base_m))
3527 	    use_it = false;
3528 	  else if (!TREE_ADDRESSABLE (base_m))
3529 	    use_it = true;
3530 	}
3531 
3532       if (use_it)
3533 	{
3534 	  var = modify_dest;
3535 	  use = NULL;
3536 	  goto done;
3537 	}
3538     }
3539 
3540   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3541 
3542   var = copy_result_decl_to_var (result, id);
3543   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3544 
3545   /* Do not have the rest of GCC warn about this variable as it should
3546      not be visible to the user.  */
3547   TREE_NO_WARNING (var) = 1;
3548 
3549   declare_inline_vars (id->block, var);
3550 
3551   /* Build the use expr.  If the return type of the function was
3552      promoted, convert it back to the expected type.  */
3553   use = var;
3554   if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3555     {
3556       /* If we can match up types by promotion/demotion do so.  */
3557       if (fold_convertible_p (caller_type, var))
3558 	use = fold_convert (caller_type, var);
3559       else
3560 	{
3561 	  /* ???  For valid programs we should not end up here.
3562 	     Still if we end up with truly mismatched types here, fall back
3563 	     to using a MEM_REF to not leak invalid GIMPLE to the following
3564 	     passes.  */
3565 	  /* Prevent var from being written into SSA form.  */
3566 	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3567 	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3568 	    DECL_GIMPLE_REG_P (var) = false;
3569 	  else if (is_gimple_reg_type (TREE_TYPE (var)))
3570 	    TREE_ADDRESSABLE (var) = true;
3571 	  use = fold_build2 (MEM_REF, caller_type,
3572 			     build_fold_addr_expr (var),
3573 			     build_int_cst (ptr_type_node, 0));
3574 	}
3575     }
3576 
3577   STRIP_USELESS_TYPE_CONVERSION (use);
3578 
3579   if (DECL_BY_REFERENCE (result))
3580     {
3581       TREE_ADDRESSABLE (var) = 1;
3582       var = build_fold_addr_expr (var);
3583     }
3584 
3585  done:
3586   /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3587      way, when the RESULT_DECL is encountered, it will be
3588      automatically replaced by the VAR_DECL.
3589 
3590      When returning by reference, ensure that RESULT_DECL remaps to a
3591      gimple_val.  */
3592   if (DECL_BY_REFERENCE (result)
3593       && !is_gimple_val (var))
3594     {
3595       tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3596       insert_decl_map (id, result, temp);
3597       /* When RESULT_DECL is in SSA form, we need to remap and initialize
3598 	 its default_def SSA_NAME.  */
3599       if (gimple_in_ssa_p (id->src_cfun)
3600 	  && is_gimple_reg (result))
3601 	{
3602 	  temp = make_ssa_name (temp);
3603 	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3604 	}
3605       insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3606     }
3607   else
3608     insert_decl_map (id, result, var);
3609 
3610   /* Remember this so we can ignore it in remap_decls.  */
3611   id->retvar = var;
3612 
3613   /* If returned bounds are used, then make a var for them.  */
3614   if (return_bounds)
3615   {
3616     tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3617     DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3618     TREE_NO_WARNING (bndtemp) = 1;
3619     declare_inline_vars (id->block, bndtemp);
3620 
3621     id->retbnd = bndtemp;
3622     insert_init_stmt (id, entry_bb,
3623 		      gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3624   }
3625 
3626   return use;
3627 }
3628 
3629 /* Determine if the function can be copied.  If so return NULL.  If
3630    not, return a string describing the reason for failure.  */
3631 
3632 const char *
3633 copy_forbidden (struct function *fun)
3634 {
3635   const char *reason = fun->cannot_be_copied_reason;
3636 
3637   /* Only examine the function once.  */
3638   if (fun->cannot_be_copied_set)
3639     return reason;
3640 
3641   /* We cannot copy a function that receives a non-local goto
3642      because we cannot remap the destination label used in the
3643      function that is performing the non-local goto.  */
3644   /* ??? Actually, this should be possible, if we work at it.
3645      No doubt there's just a handful of places that simply
3646      assume it doesn't happen and don't substitute properly.  */
3647   if (fun->has_nonlocal_label)
3648     {
3649       reason = G_("function %q+F can never be copied "
3650 		  "because it receives a non-local goto");
3651       goto fail;
3652     }
3653 
3654   if (fun->has_forced_label_in_static)
3655     {
3656       reason = G_("function %q+F can never be copied because it saves "
3657 		  "address of local label in a static variable");
3658       goto fail;
3659     }
3660 
3661  fail:
3662   fun->cannot_be_copied_reason = reason;
3663   fun->cannot_be_copied_set = true;
3664   return reason;
3665 }
3666 
3667 
3668 static const char *inline_forbidden_reason;
3669 
3670 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
3671    iff a function cannot be inlined.  Also sets the reason why.  */
3672 
3673 static tree
3674 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3675 			 struct walk_stmt_info *wip)
3676 {
3677   tree fn = (tree) wip->info;
3678   tree t;
3679   gimple *stmt = gsi_stmt (*gsi);
3680 
3681   switch (gimple_code (stmt))
3682     {
3683     case GIMPLE_CALL:
3684       /* Refuse to inline an alloca call unless the user explicitly forced it,
3685 	 as this may change the program's memory overhead drastically when the
3686 	 function using alloca is called in a loop.  In the GCC present in
3687 	 SPEC2000, inlining into schedule_block caused it to require 2GB of
3688 	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3689 	 VLA objects as those can't cause unbounded growth (they're always
3690 	 wrapped inside stack_save/stack_restore regions).  */
3691       if (gimple_maybe_alloca_call_p (stmt)
3692 	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3693 	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3694 	{
3695 	  inline_forbidden_reason
3696 	    = G_("function %q+F can never be inlined because it uses "
3697 		 "alloca (override using the always_inline attribute)");
3698 	  *handled_ops_p = true;
3699 	  return fn;
3700 	}
3701 
3702       t = gimple_call_fndecl (stmt);
3703       if (t == NULL_TREE)
3704 	break;
3705 
3706       /* We cannot inline functions that call setjmp.  */
3707       if (setjmp_call_p (t))
3708 	{
3709 	  inline_forbidden_reason
3710 	    = G_("function %q+F can never be inlined because it uses setjmp");
3711 	  *handled_ops_p = true;
3712 	  return t;
3713 	}
3714 
3715       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3716 	switch (DECL_FUNCTION_CODE (t))
3717 	  {
3718 	    /* We cannot inline functions that take a variable number of
3719 	       arguments.  */
3720 	  case BUILT_IN_VA_START:
3721 	  case BUILT_IN_NEXT_ARG:
3722 	  case BUILT_IN_VA_END:
3723 	    inline_forbidden_reason
3724 	      = G_("function %q+F can never be inlined because it "
3725 		   "uses variable argument lists");
3726 	    *handled_ops_p = true;
3727 	    return t;
3728 
3729 	  case BUILT_IN_LONGJMP:
3730 	    /* We can't inline functions that call __builtin_longjmp at
3731 	       all.  The non-local goto machinery really requires the
3732 	       destination be in a different function.  If we allow the
3733 	       function calling __builtin_longjmp to be inlined into the
3734 	       function calling __builtin_setjmp, Things will Go Awry.  */
3735 	    inline_forbidden_reason
3736 	      = G_("function %q+F can never be inlined because "
3737 		   "it uses setjmp-longjmp exception handling");
3738 	    *handled_ops_p = true;
3739 	    return t;
3740 
3741 	  case BUILT_IN_NONLOCAL_GOTO:
3742 	    /* Similarly.  */
3743 	    inline_forbidden_reason
3744 	      = G_("function %q+F can never be inlined because "
3745 		   "it uses non-local goto");
3746 	    *handled_ops_p = true;
3747 	    return t;
3748 
3749 	  case BUILT_IN_RETURN:
3750 	  case BUILT_IN_APPLY_ARGS:
3751 	    /* If a __builtin_apply_args caller would be inlined,
3752 	       it would be saving arguments of the function it has
3753 	       been inlined into.  Similarly __builtin_return would
3754 	       return from the function it has been inlined into.  */
3755 	    inline_forbidden_reason
3756 	      = G_("function %q+F can never be inlined because "
3757 		   "it uses __builtin_return or __builtin_apply_args");
3758 	    *handled_ops_p = true;
3759 	    return t;
3760 
3761 	  default:
3762 	    break;
3763 	  }
3764       break;
3765 
3766     case GIMPLE_GOTO:
3767       t = gimple_goto_dest (stmt);
3768 
3769       /* We will not inline a function which uses computed goto.  The
3770 	 addresses of its local labels, which may be tucked into
3771 	 global storage, are of course not constant across
3772 	 instantiations, which causes unexpected behavior.  */
3773       if (TREE_CODE (t) != LABEL_DECL)
3774 	{
3775 	  inline_forbidden_reason
3776 	    = G_("function %q+F can never be inlined "
3777 		 "because it contains a computed goto");
3778 	  *handled_ops_p = true;
3779 	  return t;
3780 	}
3781       break;
3782 
3783     default:
3784       break;
3785     }
3786 
3787   *handled_ops_p = false;
3788   return NULL_TREE;
3789 }
3790 
3791 /* Return true if FNDECL is a function that cannot be inlined into
3792    another one.  */
3793 
3794 static bool
3795 inline_forbidden_p (tree fndecl)
3796 {
3797   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3798   struct walk_stmt_info wi;
3799   basic_block bb;
3800   bool forbidden_p = false;
3801 
3802   /* First check for shared reasons not to copy the code.  */
3803   inline_forbidden_reason = copy_forbidden (fun);
3804   if (inline_forbidden_reason != NULL)
3805     return true;
3806 
3807   /* Next, walk the statements of the function looking for
3808      constructs we can't handle, or that are non-optimal for inlining.  */
3809   hash_set<tree> visited_nodes;
3810   memset (&wi, 0, sizeof (wi));
3811   wi.info = (void *) fndecl;
3812   wi.pset = &visited_nodes;
3813 
3814   FOR_EACH_BB_FN (bb, fun)
3815     {
3816       gimple *ret;
3817       gimple_seq seq = bb_seq (bb);
3818       ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3819       forbidden_p = (ret != NULL);
3820       if (forbidden_p)
3821 	break;
3822     }
3823 
3824   return forbidden_p;
3825 }
3826 
3827 /* Return false if the function FNDECL cannot be inlined on account of its
3828    attributes, true otherwise.  */
3829 static bool
3830 function_attribute_inlinable_p (const_tree fndecl)
3831 {
3832   if (targetm.attribute_table)
3833     {
3834       const_tree a;
3835 
3836       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3837 	{
3838 	  const_tree name = TREE_PURPOSE (a);
3839 	  int i;
3840 
3841 	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3842 	    if (is_attribute_p (targetm.attribute_table[i].name, name))
3843 	      return targetm.function_attribute_inlinable_p (fndecl);
3844 	}
3845     }
3846 
3847   return true;
3848 }
3849 
3850 /* Returns nonzero if FN is a function that does not have any
3851    fundamental inline-blocking properties.  */
3852 
3853 bool
3854 tree_inlinable_function_p (tree fn)
3855 {
3856   bool inlinable = true;
3857   bool do_warning;
3858   tree always_inline;
3859 
3860   /* If we've already decided this function shouldn't be inlined,
3861      there's no need to check again.  */
3862   if (DECL_UNINLINABLE (fn))
3863     return false;
3864 
3865   /* We only warn for functions declared `inline' by the user.  */
3866   do_warning = (warn_inline
3867 		&& DECL_DECLARED_INLINE_P (fn)
3868 		&& !DECL_NO_INLINE_WARNING_P (fn)
3869 		&& !DECL_IN_SYSTEM_HEADER (fn));
3870 
3871   always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3872 
3873   if (flag_no_inline
3874       && always_inline == NULL)
3875     {
3876       if (do_warning)
3877         warning (OPT_Winline, "function %q+F can never be inlined because it "
3878                  "is suppressed using -fno-inline", fn);
3879       inlinable = false;
3880     }
3881 
3882   else if (!function_attribute_inlinable_p (fn))
3883     {
3884       if (do_warning)
3885         warning (OPT_Winline, "function %q+F can never be inlined because it "
3886                  "uses attributes conflicting with inlining", fn);
3887       inlinable = false;
3888     }
3889 
3890   else if (inline_forbidden_p (fn))
3891     {
3892       /* See if we should warn about uninlinable functions.  Previously,
3893 	 some of these warnings would be issued while trying to expand
3894 	 the function inline, but that would cause multiple warnings
3895 	 about functions that would for example call alloca.  But since
3896 	 this a property of the function, just one warning is enough.
3897 	 As a bonus we can now give more details about the reason why a
3898 	 function is not inlinable.  */
3899       if (always_inline)
3900 	error (inline_forbidden_reason, fn);
3901       else if (do_warning)
3902 	warning (OPT_Winline, inline_forbidden_reason, fn);
3903 
3904       inlinable = false;
3905     }
3906 
3907   /* Squirrel away the result so that we don't have to check again.  */
3908   DECL_UNINLINABLE (fn) = !inlinable;
3909 
3910   return inlinable;
3911 }
3912 
3913 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3914    word size, take a possible memcpy call into account, and return the cost
3915    based on whether we optimize for size or speed according to SPEED_P.  */
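/* A back-of-the-envelope example (actual numbers are target-dependent):
   with MOVE_MAX_PIECES == 8, copying a 24-byte struct costs
   (24 + 8 - 1) / 8 == 3, while a type larger than
   MOVE_MAX_PIECES * MOVE_RATIO is assumed to become a memcpy call and
   costs a flat 4 (three arguments plus the call itself).  */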
3916 
3917 int
3918 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3919 {
3920   HOST_WIDE_INT size;
3921 
3922   gcc_assert (!VOID_TYPE_P (type));
3923 
3924   if (TREE_CODE (type) == VECTOR_TYPE)
3925     {
3926       scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3927       machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3928       int orig_mode_size
3929 	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3930       int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3931       return ((orig_mode_size + simd_mode_size - 1)
3932 	      / simd_mode_size);
3933     }
3934 
3935   size = int_size_in_bytes (type);
3936 
3937   if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3938     /* Cost of a memcpy call, 3 arguments and the call.  */
3939     return 4;
3940   else
3941     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3942 }
3943 
3944 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3945 
3946 static int
3947 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3948 			tree op1 ATTRIBUTE_UNUSED, tree op2)
3949 {
3950   switch (code)
3951     {
3952     /* These are "free" conversions, or their presumed cost
3953        is folded into other operations.  */
3954     case RANGE_EXPR:
3955     CASE_CONVERT:
3956     case COMPLEX_EXPR:
3957     case PAREN_EXPR:
3958     case VIEW_CONVERT_EXPR:
3959       return 0;
3960 
3961     /* Assign cost of 1 to usual operations.
3962        ??? We may consider mapping RTL costs to this.  */
3963     case COND_EXPR:
3964     case VEC_COND_EXPR:
3965     case VEC_PERM_EXPR:
3966 
3967     case PLUS_EXPR:
3968     case POINTER_PLUS_EXPR:
3969     case POINTER_DIFF_EXPR:
3970     case MINUS_EXPR:
3971     case MULT_EXPR:
3972     case MULT_HIGHPART_EXPR:
3973     case FMA_EXPR:
3974 
3975     case ADDR_SPACE_CONVERT_EXPR:
3976     case FIXED_CONVERT_EXPR:
3977     case FIX_TRUNC_EXPR:
3978 
3979     case NEGATE_EXPR:
3980     case FLOAT_EXPR:
3981     case MIN_EXPR:
3982     case MAX_EXPR:
3983     case ABS_EXPR:
3984 
3985     case LSHIFT_EXPR:
3986     case RSHIFT_EXPR:
3987     case LROTATE_EXPR:
3988     case RROTATE_EXPR:
3989 
3990     case BIT_IOR_EXPR:
3991     case BIT_XOR_EXPR:
3992     case BIT_AND_EXPR:
3993     case BIT_NOT_EXPR:
3994 
3995     case TRUTH_ANDIF_EXPR:
3996     case TRUTH_ORIF_EXPR:
3997     case TRUTH_AND_EXPR:
3998     case TRUTH_OR_EXPR:
3999     case TRUTH_XOR_EXPR:
4000     case TRUTH_NOT_EXPR:
4001 
4002     case LT_EXPR:
4003     case LE_EXPR:
4004     case GT_EXPR:
4005     case GE_EXPR:
4006     case EQ_EXPR:
4007     case NE_EXPR:
4008     case ORDERED_EXPR:
4009     case UNORDERED_EXPR:
4010 
4011     case UNLT_EXPR:
4012     case UNLE_EXPR:
4013     case UNGT_EXPR:
4014     case UNGE_EXPR:
4015     case UNEQ_EXPR:
4016     case LTGT_EXPR:
4017 
4018     case CONJ_EXPR:
4019 
4020     case PREDECREMENT_EXPR:
4021     case PREINCREMENT_EXPR:
4022     case POSTDECREMENT_EXPR:
4023     case POSTINCREMENT_EXPR:
4024 
4025     case REALIGN_LOAD_EXPR:
4026 
4027     case WIDEN_SUM_EXPR:
4028     case WIDEN_MULT_EXPR:
4029     case DOT_PROD_EXPR:
4030     case SAD_EXPR:
4031     case WIDEN_MULT_PLUS_EXPR:
4032     case WIDEN_MULT_MINUS_EXPR:
4033     case WIDEN_LSHIFT_EXPR:
4034 
4035     case VEC_WIDEN_MULT_HI_EXPR:
4036     case VEC_WIDEN_MULT_LO_EXPR:
4037     case VEC_WIDEN_MULT_EVEN_EXPR:
4038     case VEC_WIDEN_MULT_ODD_EXPR:
4039     case VEC_UNPACK_HI_EXPR:
4040     case VEC_UNPACK_LO_EXPR:
4041     case VEC_UNPACK_FLOAT_HI_EXPR:
4042     case VEC_UNPACK_FLOAT_LO_EXPR:
4043     case VEC_PACK_TRUNC_EXPR:
4044     case VEC_PACK_SAT_EXPR:
4045     case VEC_PACK_FIX_TRUNC_EXPR:
4046     case VEC_WIDEN_LSHIFT_HI_EXPR:
4047     case VEC_WIDEN_LSHIFT_LO_EXPR:
4048     case VEC_DUPLICATE_EXPR:
4049     case VEC_SERIES_EXPR:
4050 
4051       return 1;
4052 
4053     /* Few special cases of expensive operations.  This is useful
4054        to avoid inlining on functions having too many of these.  */
4055     case TRUNC_DIV_EXPR:
4056     case CEIL_DIV_EXPR:
4057     case FLOOR_DIV_EXPR:
4058     case ROUND_DIV_EXPR:
4059     case EXACT_DIV_EXPR:
4060     case TRUNC_MOD_EXPR:
4061     case CEIL_MOD_EXPR:
4062     case FLOOR_MOD_EXPR:
4063     case ROUND_MOD_EXPR:
4064     case RDIV_EXPR:
4065       if (TREE_CODE (op2) != INTEGER_CST)
4066         return weights->div_mod_cost;
4067       return 1;
4068 
4069     /* Bit-field insertion needs several shift and mask operations.  */
4070     case BIT_INSERT_EXPR:
4071       return 3;
4072 
4073     default:
4074       /* We expect a copy assignment with no operator.  */
4075       gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4076       return 0;
4077     }
4078 }
4079 
4080 
4081 /* Estimate number of instructions that will be created by expanding
4082    the statements in the statement sequence STMTS.
4083    WEIGHTS contains weights attributed to various constructs.  */
4084 
4085 int
4086 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4087 {
4088   int cost;
4089   gimple_stmt_iterator gsi;
4090 
4091   cost = 0;
4092   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4093     cost += estimate_num_insns (gsi_stmt (gsi), weights);
4094 
4095   return cost;
4096 }
4097 
4098 
4099 /* Estimate number of instructions that will be created by expanding STMT.
4100    WEIGHTS contains weights attributed to various constructs.  */
4101 
4102 int
4103 estimate_num_insns (gimple *stmt, eni_weights *weights)
4104 {
4105   unsigned cost, i;
4106   enum gimple_code code = gimple_code (stmt);
4107   tree lhs;
4108   tree rhs;
4109 
4110   switch (code)
4111     {
4112     case GIMPLE_ASSIGN:
4113       /* Try to estimate the cost of assignments.  We have two cases to
4114 	 deal with:
4115 	 1) Simple assignments to registers;
4116 	 2) Stores to things that must live in memory.  This includes
4117 	    "normal" stores to scalars, but also assignments of large
4118 	    structures, or constructors of big arrays;
4119 
4120 	 Let us look at the first two cases, assuming we have "a = b + C":
4121 	 <GIMPLE_ASSIGN <var_decl "a">
4122 	        <plus_expr <var_decl "b"> <constant C>>
4123 	 If "a" is a GIMPLE register, the assignment to it is free on almost
4124 	 any target, because "a" usually ends up in a real register.  Hence
4125 	 the only cost of this expression comes from the PLUS_EXPR, and we
4126 	 can ignore the GIMPLE_ASSIGN.
4127 	 If "a" is not a GIMPLE register, the assignment to "a" will most
4128 	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4129 	 of moving something into "a", which we compute using the function
4130 	 estimate_move_cost.  */
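      /* As a hypothetical instance: for "*p = q + r", where the LHS is a
	 store, the estimate is roughly estimate_move_cost on the stored
	 type plus the PLUS_EXPR cost, whereas for "a_1 = b_2 + 1" with a
	 register LHS only the PLUS_EXPR cost (1) remains.  */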
4131       if (gimple_clobber_p (stmt))
4132 	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4133 
4134       lhs = gimple_assign_lhs (stmt);
4135       rhs = gimple_assign_rhs1 (stmt);
4136 
4137       cost = 0;
4138 
4139       /* Account for the cost of moving to / from memory.  */
4140       if (gimple_store_p (stmt))
4141 	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4142       if (gimple_assign_load_p (stmt))
4143 	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4144 
4145       cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4146       				      gimple_assign_rhs1 (stmt),
4147 				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4148 				      == GIMPLE_BINARY_RHS
4149 				      ? gimple_assign_rhs2 (stmt) : NULL);
4150       break;
4151 
4152     case GIMPLE_COND:
4153       cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4154       				         gimple_op (stmt, 0),
4155 				         gimple_op (stmt, 1));
4156       break;
4157 
4158     case GIMPLE_SWITCH:
4159       {
4160 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
4161 	/* Take into account the cost of the switch + guess 2 conditional jumps
4162 	   for each case label.
4163 
4164 	   TODO: once the switch expansion logic is sufficiently separated, we
4165 	   can do a better job of estimating the cost of the switch.  */
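	/* For instance (a rough illustration only): a switch with 8 labels
	   is costed at 8 * 2 == 16 for code size, but at
	   floor_log2 (8) * 2 == 6 when weighing execution time, on the
	   assumption that it expands to a balanced decision tree.  */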
4166 	if (weights->time_based)
4167 	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4168 	else
4169 	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4170       }
4171       break;
4172 
4173     case GIMPLE_CALL:
4174       {
4175 	tree decl;
4176 
4177 	if (gimple_call_internal_p (stmt))
4178 	  return 0;
4179 	else if ((decl = gimple_call_fndecl (stmt))
4180 		 && DECL_BUILT_IN (decl))
4181 	  {
4182 	    /* Do not special-case builtins where we see the body.
4183 	       This just confuses the inliner.  */
4184 	    struct cgraph_node *node;
4185 	    if (!(node = cgraph_node::get (decl))
4186 		|| node->definition)
4187 	      ;
4188 	    /* For builtins that are likely expanded to nothing or
4189 	       inlined, do not account operand costs.  */
4190 	    else if (is_simple_builtin (decl))
4191 	      return 0;
4192 	    else if (is_inexpensive_builtin (decl))
4193 	      return weights->target_builtin_call_cost;
4194 	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4195 	      {
4196 		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4197 		   specialize the cheap expansion we do here.
4198 		   ???  This asks for a more general solution.  */
4199 		switch (DECL_FUNCTION_CODE (decl))
4200 		  {
4201 		    case BUILT_IN_POW:
4202 		    case BUILT_IN_POWF:
4203 		    case BUILT_IN_POWL:
4204 		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4205 			  && (real_equal
4206 			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4207 			       &dconst2)))
4208 			return estimate_operator_cost
4209 			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4210 			     gimple_call_arg (stmt, 0));
4211 		      break;
4212 
4213 		    default:
4214 		      break;
4215 		  }
4216 	      }
4217 	  }
4218 
4219 	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4220 	if (gimple_call_lhs (stmt))
4221 	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4222 				      weights->time_based);
4223 	for (i = 0; i < gimple_call_num_args (stmt); i++)
4224 	  {
4225 	    tree arg = gimple_call_arg (stmt, i);
4226 	    cost += estimate_move_cost (TREE_TYPE (arg),
4227 					weights->time_based);
4228 	  }
4229 	break;
4230       }
4231 
4232     case GIMPLE_RETURN:
4233       return weights->return_cost;
4234 
4235     case GIMPLE_GOTO:
4236     case GIMPLE_LABEL:
4237     case GIMPLE_NOP:
4238     case GIMPLE_PHI:
4239     case GIMPLE_PREDICT:
4240     case GIMPLE_DEBUG:
4241       return 0;
4242 
4243     case GIMPLE_ASM:
4244       {
4245 	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4246 	/* 1000 means infinity. This avoids overflows later
4247 	   with very long asm statements.  */
4248 	if (count > 1000)
4249 	  count = 1000;
4250 	/* If this asm is asm inline, count anything as minimum size.  */
4251 	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4252 	  count = MIN (1, count);
4253 	return MAX (1, count);
4254       }
4255 
4256     case GIMPLE_RESX:
4257       /* This is either going to be an external function call with one
4258 	 argument, or two register copy statements plus a goto.  */
4259       return 2;
4260 
4261     case GIMPLE_EH_DISPATCH:
4262       /* ??? This is going to turn into a switch statement.  Ideally
4263 	 we'd have a look at the eh region and estimate the number of
4264 	 edges involved.  */
4265       return 10;
4266 
4267     case GIMPLE_BIND:
4268       return estimate_num_insns_seq (
4269 	       gimple_bind_body (as_a <gbind *> (stmt)),
4270 	       weights);
4271 
4272     case GIMPLE_EH_FILTER:
4273       return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4274 
4275     case GIMPLE_CATCH:
4276       return estimate_num_insns_seq (gimple_catch_handler (
4277 				       as_a <gcatch *> (stmt)),
4278 				     weights);
4279 
4280     case GIMPLE_TRY:
4281       return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4282               + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4283 
4284     /* OMP directives are generally very expensive.  */
4285 
4286     case GIMPLE_OMP_RETURN:
4287     case GIMPLE_OMP_SECTIONS_SWITCH:
4288     case GIMPLE_OMP_ATOMIC_STORE:
4289     case GIMPLE_OMP_CONTINUE:
4290       /* ...except these, which are cheap.  */
4291       return 0;
4292 
4293     case GIMPLE_OMP_ATOMIC_LOAD:
4294       return weights->omp_cost;
4295 
4296     case GIMPLE_OMP_FOR:
4297       return (weights->omp_cost
4298               + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4299               + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4300 
4301     case GIMPLE_OMP_PARALLEL:
4302     case GIMPLE_OMP_TASK:
4303     case GIMPLE_OMP_CRITICAL:
4304     case GIMPLE_OMP_MASTER:
4305     case GIMPLE_OMP_TASKGROUP:
4306     case GIMPLE_OMP_ORDERED:
4307     case GIMPLE_OMP_SECTION:
4308     case GIMPLE_OMP_SECTIONS:
4309     case GIMPLE_OMP_SINGLE:
4310     case GIMPLE_OMP_TARGET:
4311     case GIMPLE_OMP_TEAMS:
4312       return (weights->omp_cost
4313               + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4314 
4315     case GIMPLE_TRANSACTION:
4316       return (weights->tm_cost
4317 	      + estimate_num_insns_seq (gimple_transaction_body (
4318 					  as_a <gtransaction *> (stmt)),
4319 					weights));
4320 
4321     default:
4322       gcc_unreachable ();
4323     }
4324 
4325   return cost;
4326 }
4327 
4328 /* Estimate number of instructions that will be created by expanding
4329    function FNDECL.  WEIGHTS contains weights attributed to various
4330    constructs.  */
4331 
4332 int
4333 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4334 {
4335   struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4336   gimple_stmt_iterator bsi;
4337   basic_block bb;
4338   int n = 0;
4339 
4340   gcc_assert (my_function && my_function->cfg);
4341   FOR_EACH_BB_FN (bb, my_function)
4342     {
4343       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4344 	n += estimate_num_insns (gsi_stmt (bsi), weights);
4345     }
4346 
4347   return n;
4348 }
4349 
4350 
4351 /* Initializes weights used by estimate_num_insns.  */
4352 
4353 void
4354 init_inline_once (void)
4355 {
4356   eni_size_weights.call_cost = 1;
4357   eni_size_weights.indirect_call_cost = 3;
4358   eni_size_weights.target_builtin_call_cost = 1;
4359   eni_size_weights.div_mod_cost = 1;
4360   eni_size_weights.omp_cost = 40;
4361   eni_size_weights.tm_cost = 10;
4362   eni_size_weights.time_based = false;
4363   eni_size_weights.return_cost = 1;
4364 
4365   /* Estimating the time for a call is difficult, since we have no idea what the
4366      called function does.  In the current uses of eni_time_weights,
4367      underestimating the cost does less harm than overestimating it, so
4368      we choose a rather small value here.  */
4369   eni_time_weights.call_cost = 10;
4370   eni_time_weights.indirect_call_cost = 15;
4371   eni_time_weights.target_builtin_call_cost = 1;
4372   eni_time_weights.div_mod_cost = 10;
4373   eni_time_weights.omp_cost = 40;
4374   eni_time_weights.tm_cost = 40;
4375   eni_time_weights.time_based = true;
4376   eni_time_weights.return_cost = 2;
4377 }
4378 
4379 
4380 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
4381 
4382 static void
4383 prepend_lexical_block (tree current_block, tree new_block)
4384 {
4385   BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4386   BLOCK_SUBBLOCKS (current_block) = new_block;
4387   BLOCK_SUPERCONTEXT (new_block) = current_block;
4388 }
4389 
4390 /* Add local variables from CALLEE to CALLER.  */
4391 
4392 static inline void
4393 add_local_variables (struct function *callee, struct function *caller,
4394 		     copy_body_data *id)
4395 {
4396   tree var;
4397   unsigned ix;
4398 
4399   FOR_EACH_LOCAL_DECL (callee, ix, var)
4400     if (!can_be_nonlocal (var, id))
4401       {
4402         tree new_var = remap_decl (var, id);
4403 
4404         /* Remap debug-expressions.  */
4405 	if (VAR_P (new_var)
4406 	    && DECL_HAS_DEBUG_EXPR_P (var)
4407 	    && new_var != var)
4408 	  {
4409 	    tree tem = DECL_DEBUG_EXPR (var);
4410 	    bool old_regimplify = id->regimplify;
4411 	    id->remapping_type_depth++;
4412 	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4413 	    id->remapping_type_depth--;
4414 	    id->regimplify = old_regimplify;
4415 	    SET_DECL_DEBUG_EXPR (new_var, tem);
4416 	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4417 	  }
4418 	add_local_decl (caller, new_var);
4419       }
4420 }
4421 
4422 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4423    have brought in or introduced any debug stmts for SRCVAR.  */
4424 
4425 static inline void
4426 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4427 {
4428   tree *remappedvarp = id->decl_map->get (srcvar);
4429 
4430   if (!remappedvarp)
4431     return;
4432 
4433   if (!VAR_P (*remappedvarp))
4434     return;
4435 
4436   if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4437     return;
4438 
4439   tree tvar = target_for_debug_bind (*remappedvarp);
4440   if (!tvar)
4441     return;
4442 
4443   gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4444 					  id->call_stmt);
4445   gimple_seq_add_stmt (bindings, stmt);
4446 }
4447 
4448 /* For each inlined variable for which we may have debug bind stmts,
4449    add before GSI a final debug stmt resetting it, marking the end of
4450    its life, so that var-tracking knows it doesn't have to compute
4451    further locations for it.  */
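/* Conceptually (a sketch of the resulting IL, not its exact syntax), for
   each such variable a statement like

     # DEBUG a => NULL

   is emitted right after the inlined body, at the point of the original
   call, ending the variable's tracked lifetime.  */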
4452 
4453 static inline void
4454 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4455 {
4456   tree var;
4457   unsigned ix;
4458   gimple_seq bindings = NULL;
4459 
4460   if (!gimple_in_ssa_p (id->src_cfun))
4461     return;
4462 
4463   if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4464     return;
4465 
4466   for (var = DECL_ARGUMENTS (id->src_fn);
4467        var; var = DECL_CHAIN (var))
4468     reset_debug_binding (id, var, &bindings);
4469 
4470   FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4471     reset_debug_binding (id, var, &bindings);
4472 
4473   gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4474 }
4475 
4476 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4477 
4478 static bool
4479 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4480 		    bitmap to_purge)
4481 {
4482   tree use_retvar;
4483   tree fn;
4484   hash_map<tree, tree> *dst;
4485   hash_map<tree, tree> *st = NULL;
4486   tree return_slot;
4487   tree modify_dest;
4488   tree return_bounds = NULL;
4489   struct cgraph_edge *cg_edge;
4490   cgraph_inline_failed_t reason;
4491   basic_block return_block;
4492   edge e;
4493   gimple_stmt_iterator gsi, stmt_gsi;
4494   bool successfully_inlined = false;
4495   bool purge_dead_abnormal_edges;
4496   gcall *call_stmt;
4497   unsigned int i;
4498   unsigned int prop_mask, src_properties;
4499   struct function *dst_cfun;
4500   tree simduid;
4501   use_operand_p use;
4502   gimple *simtenter_stmt = NULL;
4503   vec<tree> *simtvars_save;
4504 
4505   /* The gimplifier uses input_location in too many places, such as
4506      internal_get_tmp_var ().  */
4507   location_t saved_location = input_location;
4508   input_location = gimple_location (stmt);
4509 
4510   /* From here on, we're only interested in CALL_EXPRs.  */
4511   call_stmt = dyn_cast <gcall *> (stmt);
4512   if (!call_stmt)
4513     goto egress;
4514 
4515   cg_edge = id->dst_node->get_edge (stmt);
4516   gcc_checking_assert (cg_edge);
4517   /* First, see if we can figure out what function is being called.
4518      If we cannot, then there is no hope of inlining the function.  */
4519   if (cg_edge->indirect_unknown_callee)
4520     goto egress;
4521   fn = cg_edge->callee->decl;
4522   gcc_checking_assert (fn);
4523 
4524   /* If FN is a declaration of a function in a nested scope that was
4525      globally declared inline, we don't set its DECL_INITIAL.
4526      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4527      C++ front-end uses it for cdtors to refer to their internal
4528      declarations, that are not real functions.  Fortunately those
4529      don't have trees to be saved, so we can tell by checking their
4530      gimple_body.  */
4531   if (!DECL_INITIAL (fn)
4532       && DECL_ABSTRACT_ORIGIN (fn)
4533       && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4534     fn = DECL_ABSTRACT_ORIGIN (fn);
4535 
4536   /* Don't try to inline functions that are not well-suited to inlining.  */
4537   if (cg_edge->inline_failed)
4538     {
4539       reason = cg_edge->inline_failed;
4540       /* If this call was originally indirect, we do not want to emit any
4541 	 inlining related warnings or sorry messages because there are no
4542 	 guarantees regarding those.  */
4543       if (cg_edge->indirect_inlining_edge)
4544 	goto egress;
4545 
4546       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4547           /* For extern inline functions that get redefined we have always
4548 	     silently ignored the always_inline flag.  Better behavior would
4549 	     be to be able to keep both bodies and use the extern inline body
4550 	     for inlining, but we can't do that because frontends overwrite
4551 	     the body.  */
4552 	  && !cg_edge->callee->local.redefined_extern_inline
4553 	  /* During early inline pass, report only when optimization is
4554 	     not turned on.  */
4555 	  && (symtab->global_info_ready
4556 	      || !optimize
4557 	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4558 	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4559 	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4560 	{
4561 	  error ("inlining failed in call to always_inline %q+F: %s", fn,
4562 		 cgraph_inline_failed_string (reason));
4563 	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4564 	    inform (gimple_location (stmt), "called from here");
4565 	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4566 	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4567                    "called from this function");
4568 	}
4569       else if (warn_inline
4570 	       && DECL_DECLARED_INLINE_P (fn)
4571 	       && !DECL_NO_INLINE_WARNING_P (fn)
4572 	       && !DECL_IN_SYSTEM_HEADER (fn)
4573 	       && reason != CIF_UNSPECIFIED
4574 	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4575 	       /* Do not warn about not inlined recursive calls.  */
4576 	       && !cg_edge->recursive_p ()
4577 	       /* Avoid warnings during early inline pass. */
4578 	       && symtab->global_info_ready)
4579 	{
4580 	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4581 		       fn, _(cgraph_inline_failed_string (reason))))
4582 	    {
4583 	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4584 		inform (gimple_location (stmt), "called from here");
4585 	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4586 		inform (DECL_SOURCE_LOCATION (cfun->decl),
4587                        "called from this function");
4588 	    }
4589 	}
4590       goto egress;
4591     }
4592   id->src_node = cg_edge->callee;
4593 
4594   /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4595      and redirect to the function being thunked.  */
4596   if (id->src_node->thunk.thunk_p)
4597     {
4598       cgraph_edge *edge;
4599       tree virtual_offset = NULL;
4600       profile_count count = cg_edge->count;
4601       tree op;
4602       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4603 
4604       cg_edge->remove ();
4605       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4606 		   		           gimple_uid (stmt),
4607 				   	   profile_count::one (),
4608 					   profile_count::one (),
4609 				           true);
4610       edge->count = count;
4611       if (id->src_node->thunk.virtual_offset_p)
4612         virtual_offset = size_int (id->src_node->thunk.virtual_value);
4613       op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4614 			      NULL);
4615       gsi_insert_before (&iter, gimple_build_assign (op,
4616 						    gimple_call_arg (stmt, 0)),
4617 			 GSI_NEW_STMT);
4618       gcc_assert (id->src_node->thunk.this_adjusting);
4619       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4620 			 virtual_offset);
4621 
4622       gimple_call_set_arg (stmt, 0, op);
4623       gimple_call_set_fndecl (stmt, edge->callee->decl);
4624       update_stmt (stmt);
4625       id->src_node->remove ();
4626       expand_call_inline (bb, stmt, id, to_purge);
4627       maybe_remove_unused_call_args (cfun, stmt);
4628       return true;
4629     }
4630   fn = cg_edge->callee->decl;
4631   cg_edge->callee->get_untransformed_body ();
4632 
4633   if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4634     cg_edge->callee->verify ();
4635 
4636   /* We will be inlining this callee.  */
4637   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4638   id->assign_stmts.create (0);
4639 
4640   /* Update the callers EH personality.  */
4641   if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4642     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4643       = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4644 
4645   /* Split the block before the GIMPLE_CALL.  */
4646   stmt_gsi = gsi_for_stmt (stmt);
4647   gsi_prev (&stmt_gsi);
4648   e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4649   bb = e->src;
4650   return_block = e->dest;
4651   remove_edge (e);
4652 
4653   /* If the GIMPLE_CALL was the last statement of BB, it may have
4654      been the source of abnormal edges.  In this case, schedule
4655      the removal of dead abnormal edges.  */
4656   gsi = gsi_start_bb (return_block);
4657   gsi_next (&gsi);
4658   purge_dead_abnormal_edges = gsi_end_p (gsi);
4659 
4660   stmt_gsi = gsi_start_bb (return_block);
4661 
4662   /* Build a block containing code to initialize the arguments, the
4663      actual inline expansion of the body, and a label for the return
4664      statements within the function to jump to.  The type of the
4665      statement expression is the return type of the function call.
4666      ???  If the call does not have an associated block then we will
4667      remap all callee blocks to NULL, effectively dropping most of
4668      its debug information.  This should only happen for calls to
4669      artificial decls inserted by the compiler itself.  We need to
4670      either link the inlined blocks into the caller block tree or
4671      not refer to them in any way to not break GC for locations.  */
4672   if (gimple_block (stmt))
4673     {
4674       /* We do want to assign a BLOCK_SOURCE_LOCATION that is not
4675          UNKNOWN_LOCATION, so inlined_function_outer_scope_p returns true on this BLOCK.  */
4676       location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4677       if (loc == UNKNOWN_LOCATION)
4678 	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4679       if (loc == UNKNOWN_LOCATION)
4680 	loc = BUILTINS_LOCATION;
4681       id->block = make_node (BLOCK);
4682       BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4683       BLOCK_SOURCE_LOCATION (id->block) = loc;
4684       prepend_lexical_block (gimple_block (stmt), id->block);
4685     }
4686 
4687   /* Local declarations will be replaced by their equivalents in this
4688      map.  */
4689   st = id->decl_map;
4690   id->decl_map = new hash_map<tree, tree>;
4691   dst = id->debug_map;
4692   id->debug_map = NULL;
4693 
4694   /* Record the function we are about to inline.  */
4695   id->src_fn = fn;
4696   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4697   id->call_stmt = call_stmt;
4698 
4699   /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4700      variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4701   dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4702   simtvars_save = id->dst_simt_vars;
4703   if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4704       && (simduid = bb->loop_father->simduid) != NULL_TREE
4705       && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4706       && single_imm_use (simduid, &use, &simtenter_stmt)
4707       && is_gimple_call (simtenter_stmt)
4708       && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4709     vec_alloc (id->dst_simt_vars, 0);
4710   else
4711     id->dst_simt_vars = NULL;
4712 
4713   if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4714     profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4715 
4716   /* If the src function contains an IFN_VA_ARG, then so will the dst
4717      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
4718   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4719   src_properties = id->src_cfun->curr_properties & prop_mask;
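  /* I.e. clear from DST_CFUN exactly those PROP_MASK properties that the
     callee's body lacks; properties outside PROP_MASK are left untouched.  */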
4720   if (src_properties != prop_mask)
4721     dst_cfun->curr_properties &= src_properties | ~prop_mask;
4722 
4723   gcc_assert (!id->src_cfun->after_inlining);
4724 
4725   id->entry_bb = bb;
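  /* If the callee is declared cold, add a predictor marking this path as
     unlikely to be executed.  */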
4726   if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4727     {
4728       gimple_stmt_iterator si = gsi_last_bb (bb);
4729       gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4730       						   NOT_TAKEN),
4731 			GSI_NEW_STMT);
4732     }
4733   initialize_inlined_parameters (id, stmt, fn, bb);
4734   if (debug_nonbind_markers_p && debug_inline_points && id->block
4735       && inlined_function_outer_scope_p (id->block))
4736     {
4737       gimple_stmt_iterator si = gsi_last_bb (bb);
4738       gsi_insert_after (&si, gimple_build_debug_inline_entry
4739 			(id->block, input_location), GSI_NEW_STMT);
4740     }
4741 
4742   if (DECL_INITIAL (fn))
4743     {
4744       if (gimple_block (stmt))
4745 	{
4746 	  tree *var;
4747 
4748 	  prepend_lexical_block (id->block,
4749 				 remap_blocks (DECL_INITIAL (fn), id));
4750 	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4751 			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4752 				   == NULL_TREE));
4753 	  /* Move the vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4754 	     otherwise the DWARF DW_TAG_formal_parameter DIEs will not be children
4755 	     of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block under it.
4756 	     The parameters can then still be evaluated in the debugger,
4757 	     but will not show up in backtraces.  */
4758 	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4759 	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4760 	      {
4761 		tree v = *var;
4762 		*var = TREE_CHAIN (v);
4763 		TREE_CHAIN (v) = BLOCK_VARS (id->block);
4764 		BLOCK_VARS (id->block) = v;
4765 	      }
4766 	    else
4767 	      var = &TREE_CHAIN (*var);
4768 	}
4769       else
4770 	remap_blocks_to_null (DECL_INITIAL (fn), id);
4771     }
4772 
4773   /* Return statements in the function body will be replaced by jumps
4774      to the RET_LABEL.  */
4775   gcc_assert (DECL_INITIAL (fn));
4776   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4777 
4778   /* Find the LHS to which the result of this call is assigned.  */
4779   return_slot = NULL;
4780   if (gimple_call_lhs (stmt))
4781     {
4782       modify_dest = gimple_call_lhs (stmt);
4783 
4784       /* Remember where to copy returned bounds.  */
4785       if (gimple_call_with_bounds_p (stmt)
4786 	  && TREE_CODE (modify_dest) == SSA_NAME)
4787 	{
4788 	  gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4789 	  if (retbnd)
4790 	    {
4791 	      return_bounds = gimple_call_lhs (retbnd);
4792 	      /* If the returned bounds are not used, then just
4793 		 remove the unused call.  */
4794 	      if (!return_bounds)
4795 		{
4796 		  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4797 		  gsi_remove (&iter, true);
4798 		}
4799 	    }
4800 	}
4801 
4802       /* The function which we are inlining might not return a value,
4803 	 in which case we should issue a warning that the function
4804 	 does not return a value.  In that case the optimizers will
4805 	 see that the variable to which the value is assigned was not
4806 	 initialized.  We do not want to issue a warning about that
4807 	 uninitialized variable.  */
4808       if (DECL_P (modify_dest))
4809 	TREE_NO_WARNING (modify_dest) = 1;
4810 
4811       if (gimple_call_return_slot_opt_p (call_stmt))
4812 	{
4813 	  return_slot = modify_dest;
4814 	  modify_dest = NULL;
4815 	}
4816     }
4817   else
4818     modify_dest = NULL;
4819 
4820   /* If we are inlining a call to the C++ operator new, we don't want
4821      to use type based alias analysis on the return value.  Otherwise
4822      we may get confused if the compiler sees that the inlined new
4823      function returns a pointer which was just deleted.  See bug
4824      33407.  */
4825   if (DECL_IS_OPERATOR_NEW (fn))
4826     {
4827       return_slot = NULL;
4828       modify_dest = NULL;
4829     }
4830 
4831   /* Declare the return variable for the function.  */
4832   use_retvar = declare_return_variable (id, return_slot, modify_dest,
4833 					return_bounds, bb);
4834 
4835   /* Add local vars in this inlined callee to caller.  */
4836   add_local_variables (id->src_cfun, cfun, id);
4837 
4838   if (dump_file && (dump_flags & TDF_DETAILS))
4839     {
4840       fprintf (dump_file, "Inlining %s to %s with frequency %4.2f\n",
4841 	       id->src_node->dump_name (),
4842 	       id->dst_node->dump_name (),
4843 	       cg_edge->sreal_frequency ().to_double ());
4844       id->src_node->dump (dump_file);
4845       id->dst_node->dump (dump_file);
4846     }
4847 
4848   /* This is it.  Duplicate the callee body.  Assume callee is
4849      pre-gimplified.  Note that we must not alter the caller
4850      function in any way before this point, as this CALL_EXPR may be
4851      a self-referential call; if we're calling ourselves, we need to
4852      duplicate our body before altering anything.  */
4853   copy_body (id, bb, return_block, NULL);
4854 
4855   reset_debug_bindings (id, stmt_gsi);
4856 
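  /* With -fstack-reuse enabled, emit clobbers after the inlined body for the
     variables backing the callee's non-volatile parameters, so their stack
     slots can be reused.  */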
4857   if (flag_stack_reuse != SR_NONE)
4858     for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4859       if (!TREE_THIS_VOLATILE (p))
4860 	{
4861 	  tree *varp = id->decl_map->get (p);
4862 	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4863 	    {
4864 	      tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4865 	      gimple *clobber_stmt;
4866 	      TREE_THIS_VOLATILE (clobber) = 1;
4867 	      clobber_stmt = gimple_build_assign (*varp, clobber);
4868 	      gimple_set_location (clobber_stmt, gimple_location (stmt));
4869 	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4870 	    }
4871 	}
4872 
4873   /* Reset the escaped solution.  */
4874   if (cfun->gimple_df)
4875     pt_solution_reset (&cfun->gimple_df->escaped);
4876 
4877   /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
4878   if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4879     {
4880       size_t nargs = gimple_call_num_args (simtenter_stmt);
4881       vec<tree> *vars = id->dst_simt_vars;
4882       auto_vec<tree> newargs (nargs + vars->length ());
4883       for (size_t i = 0; i < nargs; i++)
4884 	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4885       for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4886 	{
4887 	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4888 	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4889 	}
4890       gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4891       gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4892       gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4893       gsi_replace (&gsi, g, false);
4894     }
4895   vec_free (id->dst_simt_vars);
4896   id->dst_simt_vars = simtvars_save;
4897 
4898   /* Clean up.  */
4899   if (id->debug_map)
4900     {
4901       delete id->debug_map;
4902       id->debug_map = dst;
4903     }
4904   delete id->decl_map;
4905   id->decl_map = st;
4906 
4907   /* Unlink the call's virtual operands before replacing it.  */
4908   unlink_stmt_vdef (stmt);
4909   if (gimple_vdef (stmt)
4910       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4911     release_ssa_name (gimple_vdef (stmt));
4912 
4913   /* If the inlined function returns a result that we care about,
4914      substitute the GIMPLE_CALL with an assignment of the return
4915      variable to the LHS of the call.  That is, if STMT was
4916      'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
4917   if (use_retvar && gimple_call_lhs (stmt))
4918     {
4919       gimple *old_stmt = stmt;
4920       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4921       gsi_replace (&stmt_gsi, stmt, false);
4922       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4923       /* Append a clobber for id->retvar if easily possible.  */
4924       if (flag_stack_reuse != SR_NONE
4925 	  && id->retvar
4926 	  && VAR_P (id->retvar)
4927 	  && id->retvar != return_slot
4928 	  && id->retvar != modify_dest
4929 	  && !TREE_THIS_VOLATILE (id->retvar)
4930 	  && !is_gimple_reg (id->retvar)
4931 	  && !stmt_ends_bb_p (stmt))
4932 	{
4933 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4934 	  gimple *clobber_stmt;
4935 	  TREE_THIS_VOLATILE (clobber) = 1;
4936 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4937 	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4938 	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4939 	}
4940 
4941       /* Copy bounds if we copy structure with bounds.  */
4942       if (chkp_function_instrumented_p (id->dst_fn)
4943 	  && !BOUNDED_P (use_retvar)
4944 	  && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4945 	id->assign_stmts.safe_push (stmt);
4946     }
4947   else
4948     {
4949       /* Handle the case of inlining a function with no return
4950 	 statement, which causes the return value to become undefined.  */
4951       if (gimple_call_lhs (stmt)
4952 	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4953 	{
4954 	  tree name = gimple_call_lhs (stmt);
4955 	  tree var = SSA_NAME_VAR (name);
4956 	  tree def = var ? ssa_default_def (cfun, var) : NULL;
4957 
4958 	  if (def)
4959 	    {
4960 	      /* If the variable is used undefined, make this name
4961 		 undefined via a move.  */
4962 	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4963 	      gsi_replace (&stmt_gsi, stmt, true);
4964 	    }
4965 	  else
4966 	    {
4967 	      if (!var)
4968 		{
4969 		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4970 		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4971 		}
4972 	      /* Otherwise make this variable undefined.  */
4973 	      gsi_remove (&stmt_gsi, true);
4974 	      set_ssa_default_def (cfun, var, name);
4975 	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4976 	    }
4977 	}
4978       /* Replace with a clobber for id->retvar.  */
4979       else if (flag_stack_reuse != SR_NONE
4980 	       && id->retvar
4981 	       && VAR_P (id->retvar)
4982 	       && id->retvar != return_slot
4983 	       && id->retvar != modify_dest
4984 	       && !TREE_THIS_VOLATILE (id->retvar)
4985 	       && !is_gimple_reg (id->retvar))
4986 	{
4987 	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4988 	  gimple *clobber_stmt;
4989 	  TREE_THIS_VOLATILE (clobber) = 1;
4990 	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
4991 	  gimple_set_location (clobber_stmt, gimple_location (stmt));
4992 	  gsi_replace (&stmt_gsi, clobber_stmt, false);
4993 	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4994 	}
4995       else
4996 	gsi_remove (&stmt_gsi, true);
4997     }
4998 
4999   /* Put returned bounds into the correct place if required.  */
5000   if (return_bounds)
5001     {
5002       gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
5003       gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
5004       gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
5005       unlink_stmt_vdef (old_stmt);
5006       gsi_replace (&bnd_gsi, new_stmt, false);
5007       maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
5008       cgraph_update_edges_for_call_stmt (old_stmt,
5009 					 gimple_call_fndecl (old_stmt),
5010 					 new_stmt);
5011     }
5012 
5013   if (purge_dead_abnormal_edges)
5014     bitmap_set_bit (to_purge, return_block->index);
5015 
5016   /* If the value of the new expression is ignored, that's OK.  We
5017      don't warn about this for CALL_EXPRs, so we shouldn't warn about
5018      the equivalent inlined version either.  */
5019   if (is_gimple_assign (stmt))
5020     {
5021       gcc_assert (gimple_assign_single_p (stmt)
5022 		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5023       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5024     }
5025 
5026   /* Copy bounds for all generated assigns that need it.  */
5027   for (i = 0; i < id->assign_stmts.length (); i++)
5028     chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
5029   id->assign_stmts.release ();
5030 
5031   /* Output the inlining info for this abstract function, since it has been
5032      inlined.  If we don't do this now, we can lose the information about the
5033      variables in the function when the blocks get blown away as soon as we
5034      remove the cgraph node.  */
5035   if (gimple_block (stmt))
5036     (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
5037 
5038   /* Update callgraph if needed.  */
5039   cg_edge->callee->remove ();
5040 
5041   id->block = NULL_TREE;
5042   id->retvar = NULL_TREE;
5043   id->retbnd = NULL_TREE;
5044   successfully_inlined = true;
5045 
5046  egress:
5047   input_location = saved_location;
5048   return successfully_inlined;
5049 }
5050 
5051 /* Expand call statements reachable from basic block BB.
5052    We can only have CALL_EXPRs as the "toplevel" tree code or nested
5053    in a MODIFY_EXPR.  */
5054 
5055 static bool
5056 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5057 			    bitmap to_purge)
5058 {
5059   gimple_stmt_iterator gsi;
5060   bool inlined = false;
5061 
5062   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5063     {
5064       gimple *stmt = gsi_stmt (gsi);
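      /* Advance the iterator before possibly inlining: expand_call_inline
	 may split BB at STMT and replace STMT, which would invalidate an
	 iterator still pointing at it.  */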
5065       gsi_prev (&gsi);
5066 
5067       if (is_gimple_call (stmt)
5068 	  && !gimple_call_internal_p (stmt))
5069 	inlined |= expand_call_inline (bb, stmt, id, to_purge);
5070     }
5071 
5072   return inlined;
5073 }
5074 
5075 
5076 /* Walk all basic blocks created after FIRST and try to fold every statement
5077    in the STATEMENTS pointer set.  */
5078 
5079 static void
5080 fold_marked_statements (int first, hash_set<gimple *> *statements)
5081 {
5082   auto_bitmap to_purge;
5083   for (; first < last_basic_block_for_fn (cfun); first++)
5084     if (BASIC_BLOCK_FOR_FN (cfun, first))
5085       {
5086         gimple_stmt_iterator gsi;
5087 
5088 	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5089 	     !gsi_end_p (gsi);
5090 	     gsi_next (&gsi))
5091 	  if (statements->contains (gsi_stmt (gsi)))
5092 	    {
5093 	      gimple *old_stmt = gsi_stmt (gsi);
5094 	      tree old_decl
5095 		= is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5096 
5097 	      if (old_decl && DECL_BUILT_IN (old_decl))
5098 		{
5099 		  /* Folding builtins can create multiple statements;
5100 		     we need to look at all of them.  */
5101 		  gimple_stmt_iterator i2 = gsi;
5102 		  gsi_prev (&i2);
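		  /* I2 now points just before the statement being folded, so
		     after folding we can walk forward over every statement
		     the fold produced.  */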
5103 		  if (fold_stmt (&gsi))
5104 		    {
5105 		      gimple *new_stmt;
5106 		      /* If a builtin at the end of a bb folded into nothing,
5107 			 the following loop won't work.  */
5108 		      if (gsi_end_p (gsi))
5109 			{
5110 			  cgraph_update_edges_for_call_stmt (old_stmt,
5111 							     old_decl, NULL);
5112 			  break;
5113 			}
5114 		      if (gsi_end_p (i2))
5115 			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5116 		      else
5117 			gsi_next (&i2);
5118 		      while (1)
5119 			{
5120 			  new_stmt = gsi_stmt (i2);
5121 			  update_stmt (new_stmt);
5122 			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5123 							     new_stmt);
5124 
5125 			  if (new_stmt == gsi_stmt (gsi))
5126 			    {
5127 			      /* It is okay to check only for the very last
5128 				 of these statements.  If it is a throwing
5129 				 statement nothing will change.  If it isn't
5130 				 this can remove EH edges.  The only way that
5131 				 could be wrong is if some intermediate stmts
5132 				 threw while the last one did not; that would
5133 				 mean we'd have to split the block, which we
5134 				 can't do here and we'd lose anyway.  And as
5135 				 builtins probably never throw, this all
5136 				 is moot anyway.  */
5137 			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
5138 								  new_stmt))
5139 				bitmap_set_bit (to_purge, first);
5140 			      break;
5141 			    }
5142 			  gsi_next (&i2);
5143 			}
5144 		    }
5145 		}
5146 	      else if (fold_stmt (&gsi))
5147 		{
5148 		  /* Re-read the statement from GSI as fold_stmt() may
5149 		     have changed it.  */
5150 		  gimple *new_stmt = gsi_stmt (gsi);
5151 		  update_stmt (new_stmt);
5152 
5153 		  if (is_gimple_call (old_stmt)
5154 		      || is_gimple_call (new_stmt))
5155 		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5156 						       new_stmt);
5157 
5158 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5159 		    bitmap_set_bit (to_purge, first);
5160 		}
5161 	    }
5162       }
5163   gimple_purge_all_dead_eh_edges (to_purge);
5164 }
5165 
5166 /* Expand calls to inline functions in the body of FN.  */
5167 
5168 unsigned int
5169 optimize_inline_calls (tree fn)
5170 {
5171   copy_body_data id;
5172   basic_block bb;
5173   int last = n_basic_blocks_for_fn (cfun);
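  /* Remember how many basic blocks the function has now; the statements that
     need folding live in blocks added after this point by inlining.  */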
5174   bool inlined_p = false;
5175 
5176   /* Clear out ID.  */
5177   memset (&id, 0, sizeof (id));
5178 
5179   id.src_node = id.dst_node = cgraph_node::get (fn);
5180   gcc_assert (id.dst_node->definition);
5181   id.dst_fn = fn;
5182   /* Or any functions that aren't finished yet.  */
5183   if (current_function_decl)
5184     id.dst_fn = current_function_decl;
5185 
5186   id.copy_decl = copy_decl_maybe_to_var;
5187   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5188   id.transform_new_cfg = false;
5189   id.transform_return_to_modify = true;
5190   id.transform_parameter = true;
5191   id.transform_lang_insert_block = NULL;
5192   id.statements_to_fold = new hash_set<gimple *>;
5193 
5194   push_gimplify_context ();
5195 
5196   /* We make no attempts to keep dominance info up-to-date.  */
5197   free_dominance_info (CDI_DOMINATORS);
5198   free_dominance_info (CDI_POST_DOMINATORS);
5199 
5200   /* Register specific gimple functions.  */
5201   gimple_register_cfg_hooks ();
5202 
5203   /* Reach the trees by walking over the CFG, and note the
5204      enclosing basic-blocks in the call edges.  */
5205   /* We walk the blocks going forward, because inlined function bodies
5206      will split id->current_basic_block, and the new blocks will
5207      follow it; we'll trudge through them, processing their CALL_EXPRs
5208      along the way.  */
5209   auto_bitmap to_purge;
5210   FOR_EACH_BB_FN (bb, cfun)
5211     inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5212 
5213   pop_gimplify_context (NULL);
5214 
5215   if (flag_checking)
5216     {
5217       struct cgraph_edge *e;
5218 
5219       id.dst_node->verify ();
5220 
5221       /* Double check that we inlined everything we are supposed to inline.  */
5222       for (e = id.dst_node->callees; e; e = e->next_callee)
5223 	gcc_assert (e->inline_failed);
5224     }
5225 
5226   /* Fold queued statements.  */
5227   update_max_bb_count ();
5228   fold_marked_statements (last, id.statements_to_fold);
5229   delete id.statements_to_fold;
5230 
5231   /* Finally purge EH and abnormal edges from the call stmts we inlined.
5232      We need to do this after fold_marked_statements since that may walk
5233      the SSA use-def chain.  */
5234   unsigned i;
5235   bitmap_iterator bi;
5236   EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5237     {
5238       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5239       if (bb)
5240 	{
5241 	  gimple_purge_dead_eh_edges (bb);
5242 	  gimple_purge_dead_abnormal_call_edges (bb);
5243 	}
5244     }
5245 
5246   gcc_assert (!id.debug_stmts.exists ());
5247 
5248   /* If we didn't inline into the function there is nothing to do.  */
5249   if (!inlined_p)
5250     return 0;
5251 
5252   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5253   number_blocks (fn);
5254 
5255   delete_unreachable_blocks_update_callgraph (&id);
5256   if (flag_checking)
5257     id.dst_node->verify ();
5258 
5259   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5260      not possible yet - the IPA passes might mark various functions as not
5261      throwing, and they don't care to proactively update local EH info.  This
5262      is done later in the fixup_cfg pass, which also executes the verification.  */
5263   return (TODO_update_ssa
5264 	  | TODO_cleanup_cfg
5265 	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5266 	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5267 	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5268 	     ? TODO_rebuild_frequencies : 0));
5269 }
5270 
5271 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5272 
5273 tree
5274 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5275 {
5276   enum tree_code code = TREE_CODE (*tp);
5277   enum tree_code_class cl = TREE_CODE_CLASS (code);
5278 
5279   /* We make copies of most nodes.  */
5280   if (IS_EXPR_CODE_CLASS (cl)
5281       || code == TREE_LIST
5282       || code == TREE_VEC
5283       || code == TYPE_DECL
5284       || code == OMP_CLAUSE)
5285     {
5286       /* Because the chain gets clobbered when we make a copy, we save it
5287 	 here.  */
5288       tree chain = NULL_TREE, new_tree;
5289 
5290       if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5291 	chain = TREE_CHAIN (*tp);
5292 
5293       /* Copy the node.  */
5294       new_tree = copy_node (*tp);
5295 
5296       *tp = new_tree;
5297 
5298       /* Now, restore the chain, if appropriate.  That will cause
5299 	 walk_tree to walk into the chain as well.  */
5300       if (code == PARM_DECL
5301 	  || code == TREE_LIST
5302 	  || code == OMP_CLAUSE)
5303 	TREE_CHAIN (*tp) = chain;
5304 
5305       /* For now, we don't update BLOCKs when we make copies.  So, we
5306 	 have to nullify all BIND_EXPRs.  */
5307       if (TREE_CODE (*tp) == BIND_EXPR)
5308 	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5309     }
5310   else if (code == CONSTRUCTOR)
5311     {
5312       /* CONSTRUCTOR nodes need special handling because
5313          we need to duplicate the vector of elements.  */
5314       tree new_tree;
5315 
5316       new_tree = copy_node (*tp);
5317       CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5318       *tp = new_tree;
5319     }
5320   else if (code == STATEMENT_LIST)
5321     /* We used to just abort on STATEMENT_LIST, but we can run into them
5322        with statement-expressions (c++/40975).  */
5323     copy_statement_list (tp);
5324   else if (TREE_CODE_CLASS (code) == tcc_type)
5325     *walk_subtrees = 0;
5326   else if (TREE_CODE_CLASS (code) == tcc_declaration)
5327     *walk_subtrees = 0;
5328   else if (TREE_CODE_CLASS (code) == tcc_constant)
5329     *walk_subtrees = 0;
5330   return NULL_TREE;
5331 }
5332 
5333 /* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
5334    information indicating to what new SAVE_EXPR this one should be mapped,
5335    use that one.  Otherwise, create a new node and enter it in ST.  FN is
5336    the function into which the copy will be placed.  */
5337 
5338 static void
5339 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5340 {
5341   tree *n;
5342   tree t;
5343 
5344   /* See if we already encountered this SAVE_EXPR.  */
5345   n = st->get (*tp);
5346 
5347   /* If we didn't already remap this SAVE_EXPR, do so now.  */
5348   if (!n)
5349     {
5350       t = copy_node (*tp);
5351 
5352       /* Remember this SAVE_EXPR.  */
5353       st->put (*tp, t);
5354       /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5355       st->put (t, t);
5356     }
5357   else
5358     {
5359       /* We've already walked into this SAVE_EXPR; don't do it again.  */
5360       *walk_subtrees = 0;
5361       t = *n;
5362     }
5363 
5364   /* Replace this SAVE_EXPR with the copy.  */
5365   *tp = t;
5366 }
5367 
5368 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5369    label, copies the declaration and enters it in the decl map of the
5370    copy_body_data passed through WI->INFO.  */
5371 
5372 static tree
5373 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5374 		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5375 		        struct walk_stmt_info *wi)
5376 {
5377   copy_body_data *id = (copy_body_data *) wi->info;
5378   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5379 
5380   if (stmt)
5381     {
5382       tree decl = gimple_label_label (stmt);
5383 
5384       /* Copy the decl and remember the copy.  */
5385       insert_decl_map (id, decl, id->copy_decl (decl, id));
5386     }
5387 
5388   return NULL_TREE;
5389 }
5390 
5391 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5392 						  struct walk_stmt_info *wi);
5393 
5394 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5395    Using the decl map pointed to by ST (really ID->DECL_MAP, a hash_map),
5396    remaps all local declarations to appropriate replacements in gimple
5397    operands. */
5398 
5399 static tree
5400 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5401 {
5402   struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5403   copy_body_data *id = (copy_body_data *) wi->info;
5404   hash_map<tree, tree> *st = id->decl_map;
5405   tree *n;
5406   tree expr = *tp;
5407 
5408   /* For recursive invocations this is no longer the LHS itself.  */
5409   bool is_lhs = wi->is_lhs;
5410   wi->is_lhs = false;
5411 
5412   if (TREE_CODE (expr) == SSA_NAME)
5413     {
5414       *tp = remap_ssa_name (*tp, id);
5415       *walk_subtrees = 0;
5416       if (is_lhs)
5417 	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5418     }
5419   /* Only a local declaration (variable or label).  */
5420   else if ((VAR_P (expr) && !TREE_STATIC (expr))
5421 	   || TREE_CODE (expr) == LABEL_DECL)
5422     {
5423       /* Lookup the declaration.  */
5424       n = st->get (expr);
5425 
5426       /* If it's there, remap it.  */
5427       if (n)
5428 	*tp = *n;
5429       *walk_subtrees = 0;
5430     }
5431   else if (TREE_CODE (expr) == STATEMENT_LIST
5432 	   || TREE_CODE (expr) == BIND_EXPR
5433 	   || TREE_CODE (expr) == SAVE_EXPR)
5434     gcc_unreachable ();
5435   else if (TREE_CODE (expr) == TARGET_EXPR)
5436     {
5437       /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5438          It's OK for this to happen if it was part of a subtree that
5439          isn't immediately expanded, such as operand 2 of another
5440          TARGET_EXPR.  */
5441       if (!TREE_OPERAND (expr, 1))
5442 	{
5443 	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5444 	  TREE_OPERAND (expr, 3) = NULL_TREE;
5445 	}
5446     }
5447   else if (TREE_CODE (expr) == OMP_CLAUSE)
5448     {
5449       /* Before the omplower pass completes, some OMP clauses can contain
5450 	 sequences that are neither copied by gimple_seq_copy nor walked by
5451 	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
5452 	 in those situations, we have to copy and process them explicitly.  */
5453 
5454       if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5455 	{
5456 	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5457 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5458 	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5459 	}
5460       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5461 	{
5462 	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5463 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5464 	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5465 	}
5466       else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5467 	{
5468 	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5469 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5470 	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5471 	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5472 	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5473 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5474 	}
5475     }
5476 
5477   /* Keep iterating.  */
5478   return NULL_TREE;
5479 }
5480 
5481 
5482 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5483    Using the decl map in the copy_body_data carried by WI->INFO,
5484    remaps all local declarations to appropriate replacements in gimple
5485    statements. */
5486 
5487 static tree
5488 replace_locals_stmt (gimple_stmt_iterator *gsip,
5489 		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5490 		     struct walk_stmt_info *wi)
5491 {
5492   copy_body_data *id = (copy_body_data *) wi->info;
5493   gimple *gs = gsi_stmt (*gsip);
5494 
5495   if (gbind *stmt = dyn_cast <gbind *> (gs))
5496     {
5497       tree block = gimple_bind_block (stmt);
5498 
5499       if (block)
5500 	{
5501 	  remap_block (&block, id);
5502 	  gimple_bind_set_block (stmt, block);
5503 	}
5504 
5505       /* This will remap a lot of the same decls again, but this should be
5506 	 harmless.  */
5507       if (gimple_bind_vars (stmt))
5508 	{
5509 	  tree old_var, decls = gimple_bind_vars (stmt);
5510 
5511 	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5512 	    if (!can_be_nonlocal (old_var, id)
5513 		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5514 	      remap_decl (old_var, id);
5515 
5516 	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5517 	  id->prevent_decl_creation_for_types = true;
5518 	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5519 	  id->prevent_decl_creation_for_types = false;
5520 	}
5521     }
5522 
5523   /* Keep iterating.  */
5524   return NULL_TREE;
5525 }
5526 
5527 /* Create a copy of SEQ and remap all decls in it.  */
5528 
5529 static gimple_seq
5530 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5531 {
5532   if (!seq)
5533     return NULL;
5534 
5535   /* If there are any labels in OMP sequences, they can only be referred to
5536      within the sequence itself, so we can handle both steps here.  */
5537   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5538   gimple_seq copy = gimple_seq_copy (seq);
5539   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5540   return copy;
5541 }
5542 
5543 /* Copies everything in SEQ and replaces variables and labels local to
5544    current_function_decl.  */
5545 
5546 gimple_seq
5547 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5548 {
5549   copy_body_data id;
5550   struct walk_stmt_info wi;
5551   gimple_seq copy;
5552 
5553   /* There's nothing to do for NULL_TREE.  */
5554   if (seq == NULL)
5555     return seq;
5556 
5557   /* Set up ID.  */
5558   memset (&id, 0, sizeof (id));
5559   id.src_fn = current_function_decl;
5560   id.dst_fn = current_function_decl;
5561   id.src_cfun = cfun;
5562   id.decl_map = new hash_map<tree, tree>;
5563   id.debug_map = NULL;
5564 
5565   id.copy_decl = copy_decl_no_change;
5566   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5567   id.transform_new_cfg = false;
5568   id.transform_return_to_modify = false;
5569   id.transform_parameter = false;
5570   id.transform_lang_insert_block = NULL;
5571 
5572   /* Walk the tree once to find local labels.  */
5573   memset (&wi, 0, sizeof (wi));
5574   hash_set<tree> visited;
5575   wi.info = &id;
5576   wi.pset = &visited;
5577   walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5578 
5579   copy = gimple_seq_copy (seq);
5580 
5581   /* Walk the copy, remapping decls.  */
5582   memset (&wi, 0, sizeof (wi));
5583   wi.info = &id;
5584   walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5585 
5586   /* Clean up.  */
5587   delete id.decl_map;
5588   if (id.debug_map)
5589     delete id.debug_map;
5590   if (id.dependence_map)
5591     {
5592       delete id.dependence_map;
5593       id.dependence_map = NULL;
5594     }
5595 
5596   return copy;
5597 }
5598 
5599 
5600 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5601 
5602 static tree
5603 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5604 {
5605   if (*tp == data)
5606     return (tree) data;
5607   else
5608     return NULL;
5609 }
5610 
5611 DEBUG_FUNCTION bool
5612 debug_find_tree (tree top, tree search)
5613 {
5614   return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5615 }
5616 
5617 
5618 /* Declare the variables created by the inliner.  Add all the variables in
5619    VARS to BLOCK and register them as local declarations of the function.  */
5620 
5621 static void
5622 declare_inline_vars (tree block, tree vars)
5623 {
5624   tree t;
5625   for (t = vars; t; t = DECL_CHAIN (t))
5626     {
5627       DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5628       gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5629       add_local_decl (cfun, t);
5630     }
5631 
5632   if (block)
5633     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5634 }
5635 
5636 /* Finish up the copy COPY of DECL: inherit the artificial and ignored flags,
5637    record the abstract origin, clear any stale RTL, and give ordinary
5638    automatic locals the context of ID's destination function.  Returns COPY.  */
5639 
5640 tree
5641 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5642 {
5643   /* Don't generate debug information for the copy if we wouldn't have
5644      generated it for the original declaration either.  */
5645   DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5646   DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5647 
5648   /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5649      declaration inspired this copy.  */
5650   DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5651 
5652   /* The new variable/label has no RTL, yet.  */
5653   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5654       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5655     SET_DECL_RTL (copy, 0);
5656   /* For vector typed decls make sure to update DECL_MODE according
5657      to the new function context.  */
5658   if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5659     SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5660 
5661   /* These args would always appear unused, if not for this.  */
5662   TREE_USED (copy) = 1;
5663 
5664   /* Set the context for the new declaration.  */
5665   if (!DECL_CONTEXT (decl))
5666     /* Globals stay global.  */
5667     ;
5668   else if (DECL_CONTEXT (decl) != id->src_fn)
5669     /* Things that weren't in the scope of the function we're inlining
5670        from aren't in the scope we're inlining to, either.  */
5671     ;
5672   else if (TREE_STATIC (decl))
5673     /* Function-scoped static variables should stay in the original
5674        function.  */
5675     ;
5676   else
5677     {
5678       /* Ordinary automatic local variables are now in the scope of the
5679 	 new function.  */
5680       DECL_CONTEXT (copy) = id->dst_fn;
5681       if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5682 	{
5683 	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5684 	    DECL_ATTRIBUTES (copy)
5685 	      = tree_cons (get_identifier ("omp simt private"), NULL,
5686 			   DECL_ATTRIBUTES (copy));
5687 	  id->dst_simt_vars->safe_push (copy);
5688 	}
5689     }
5690 
5691   return copy;
5692 }
5693 
5694 static tree
5695 copy_decl_to_var (tree decl, copy_body_data *id)
5696 {
5697   tree copy, type;
5698 
5699   gcc_assert (TREE_CODE (decl) == PARM_DECL
5700 	      || TREE_CODE (decl) == RESULT_DECL);
5701 
5702   type = TREE_TYPE (decl);
5703 
5704   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5705 		     VAR_DECL, DECL_NAME (decl), type);
5706   if (DECL_PT_UID_SET_P (decl))
5707     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5708   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5709   TREE_READONLY (copy) = TREE_READONLY (decl);
5710   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5711   DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5712 
5713   return copy_decl_for_dup_finish (id, decl, copy);
5714 }
5715 
5716 /* Like copy_decl_to_var, but create a return slot object instead of a
5717    pointer variable for return by invisible reference.  */
5718 
5719 static tree
5720 copy_result_decl_to_var (tree decl, copy_body_data *id)
5721 {
5722   tree copy, type;
5723 
5724   gcc_assert (TREE_CODE (decl) == PARM_DECL
5725 	      || TREE_CODE (decl) == RESULT_DECL);
5726 
5727   type = TREE_TYPE (decl);
5728   if (DECL_BY_REFERENCE (decl))
5729     type = TREE_TYPE (type);
5730 
5731   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5732 		     VAR_DECL, DECL_NAME (decl), type);
5733   if (DECL_PT_UID_SET_P (decl))
5734     SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5735   TREE_READONLY (copy) = TREE_READONLY (decl);
5736   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5737   if (!DECL_BY_REFERENCE (decl))
5738     {
5739       TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5740       DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5741     }
5742 
5743   return copy_decl_for_dup_finish (id, decl, copy);
5744 }
5745 
5746 tree
5747 copy_decl_no_change (tree decl, copy_body_data *id)
5748 {
5749   tree copy;
5750 
5751   copy = copy_node (decl);
5752 
5753   /* The COPY is not abstract; it will be generated in DST_FN.  */
5754   DECL_ABSTRACT_P (copy) = false;
5755   lang_hooks.dup_lang_specific_decl (copy);
5756 
5757   /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5758      been taken; it's for internal bookkeeping in expand_goto_internal.  */
5759   if (TREE_CODE (copy) == LABEL_DECL)
5760     {
5761       TREE_ADDRESSABLE (copy) = 0;
5762       LABEL_DECL_UID (copy) = -1;
5763     }
5764 
5765   return copy_decl_for_dup_finish (id, decl, copy);
5766 }
5767 
5768 static tree
5769 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5770 {
5771   if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5772     return copy_decl_to_var (decl, id);
5773   else
5774     return copy_decl_no_change (decl, id);
5775 }
5776 
5777 /* Return a copy of the function's argument tree.  */
5778 static tree
5779 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5780 			       bitmap args_to_skip, tree *vars)
5781 {
5782   tree arg, *parg;
5783   tree new_parm = NULL;
5784   int i = 0;
5785 
5786   parg = &new_parm;
5787 
5788   for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5789     if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5790       {
5791         tree new_tree = remap_decl (arg, id);
5792 	if (TREE_CODE (new_tree) != PARM_DECL)
5793 	  new_tree = id->copy_decl (arg, id);
5794         lang_hooks.dup_lang_specific_decl (new_tree);
5795         *parg = new_tree;
5796 	parg = &DECL_CHAIN (new_tree);
5797       }
5798     else if (!id->decl_map->get (arg))
5799       {
5800 	/* Make an equivalent VAR_DECL.  If the argument was used
5801 	   as a temporary variable later in the function, the uses will be
5802 	   replaced by the local variable.  */
5803 	tree var = copy_decl_to_var (arg, id);
5804 	insert_decl_map (id, arg, var);
5805         /* Declare this new variable.  */
5806         DECL_CHAIN (var) = *vars;
5807         *vars = var;
5808       }
5809   return new_parm;
5810 }
5811 
5812 /* Return a copy of the function's static chain.  */
5813 static tree
5814 copy_static_chain (tree static_chain, copy_body_data * id)
5815 {
5816   tree *chain_copy, *pvar;
5817 
5818   chain_copy = &static_chain;
5819   for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5820     {
5821       tree new_tree = remap_decl (*pvar, id);
5822       lang_hooks.dup_lang_specific_decl (new_tree);
5823       DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5824       *pvar = new_tree;
5825     }
5826   return static_chain;
5827 }
5828 
5829 /* Return true if the function is allowed to be versioned.
5830    This is a guard for the versioning functionality.  */
5831 
5832 bool
5833 tree_versionable_function_p (tree fndecl)
5834 {
5835   return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5836 	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5837 }
5838 
5839 /* Delete all unreachable basic blocks and update callgraph.
5840    Doing so is somewhat nontrivial because we need to update all clones and
5841    remove inline functions that become unreachable.  */
5842 
5843 static bool
5844 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5845 {
5846   bool changed = false;
5847   basic_block b, next_bb;
5848 
5849   find_unreachable_blocks ();
5850 
5851   /* Delete all unreachable basic blocks.  */
5852 
5853   for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5854        != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5855     {
5856       next_bb = b->next_bb;
5857 
5858       if (!(b->flags & BB_REACHABLE))
5859 	{
5860           gimple_stmt_iterator bsi;
5861 
5862           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5863 	    {
5864 	      struct cgraph_edge *e;
5865 	      struct cgraph_node *node;
5866 
5867 	      id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5868 
5869 	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5870 		  &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5871 		{
5872 		  if (!e->inline_failed)
5873 		    e->callee->remove_symbol_and_inline_clones (id->dst_node);
5874 		  else
5875 		    e->remove ();
5876 		}
5877 	      if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5878 		  && id->dst_node->clones)
5879 		for (node = id->dst_node->clones; node != id->dst_node;)
5880 		  {
5881 		    node->remove_stmt_references (gsi_stmt (bsi));
5882 		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5883 			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5884 		      {
5885 			if (!e->inline_failed)
5886 			  e->callee->remove_symbol_and_inline_clones (id->dst_node);
5887 			else
5888 			  e->remove ();
5889 		      }
5890 
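		    /* Walk the clone tree iteratively: descend into clones
		       first, then siblings, then climb back up towards
		       ID->DST_NODE.  */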
5891 		    if (node->clones)
5892 		      node = node->clones;
5893 		    else if (node->next_sibling_clone)
5894 		      node = node->next_sibling_clone;
5895 		    else
5896 		      {
5897 			while (node != id->dst_node && !node->next_sibling_clone)
5898 			  node = node->clone_of;
5899 			if (node != id->dst_node)
5900 			  node = node->next_sibling_clone;
5901 		      }
5902 		  }
5903 	    }
5904 	  delete_basic_block (b);
5905 	  changed = true;
5906 	}
5907     }
5908 
5909   return changed;
5910 }
5911 
5912 /* Update clone info after duplication.  */
5913 
5914 static void
5915 update_clone_info (copy_body_data * id)
5916 {
5917   struct cgraph_node *node;
5918   if (!id->dst_node->clones)
5919     return;
5920   for (node = id->dst_node->clones; node != id->dst_node;)
5921     {
5922       /* First update replace maps to match the new body.  */
5923       if (node->clone.tree_map)
5924         {
5925 	  unsigned int i;
5926           for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5927 	    {
5928 	      struct ipa_replace_map *replace_info;
5929 	      replace_info = (*node->clone.tree_map)[i];
5930 	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5931 	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5932 	    }
5933 	}
5934       if (node->clones)
5935 	node = node->clones;
5936       else if (node->next_sibling_clone)
5937 	node = node->next_sibling_clone;
5938       else
5939 	{
5940 	  while (node != id->dst_node && !node->next_sibling_clone)
5941 	    node = node->clone_of;
5942 	  if (node != id->dst_node)
5943 	    node = node->next_sibling_clone;
5944 	}
5945     }
5946 }
5947 
5948 /* Create a copy of a function's tree.
5949    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5950    of the original function and the new copied function
5951    respectively.  In case we want to replace a DECL
5952    tree with another tree while duplicating the function's
5953    body, TREE_MAP represents the mapping between these
5954    trees. If UPDATE_CLONES is set, the call_stmt fields
5955    of edges of clones of the function will be updated.
5956 
5957    If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5958    from the new version.
5959    If SKIP_RETURN is true, the new version will return void.
5960    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5961    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5962 */
5963 void
5964 tree_function_versioning (tree old_decl, tree new_decl,
5965 			  vec<ipa_replace_map *, va_gc> *tree_map,
5966 			  bool update_clones, bitmap args_to_skip,
5967 			  bool skip_return, bitmap blocks_to_copy,
5968 			  basic_block new_entry)
5969 {
5970   struct cgraph_node *old_version_node;
5971   struct cgraph_node *new_version_node;
5972   copy_body_data id;
5973   tree p;
5974   unsigned i;
5975   struct ipa_replace_map *replace_info;
5976   basic_block old_entry_block, bb;
5977   auto_vec<gimple *, 10> init_stmts;
5978   tree vars = NULL_TREE;
5979   bitmap debug_args_to_skip = args_to_skip;
5980 
5981   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5982 	      && TREE_CODE (new_decl) == FUNCTION_DECL);
5983   DECL_POSSIBLY_INLINED (old_decl) = 1;
5984 
5985   old_version_node = cgraph_node::get (old_decl);
5986   gcc_checking_assert (old_version_node);
5987   new_version_node = cgraph_node::get (new_decl);
5988   gcc_checking_assert (new_version_node);
5989 
5990   /* Copy over debug args.  */
5991   if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5992     {
5993       vec<tree, va_gc> **new_debug_args, **old_debug_args;
5994       gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5995       DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5996       old_debug_args = decl_debug_args_lookup (old_decl);
5997       if (old_debug_args)
5998 	{
5999 	  new_debug_args = decl_debug_args_insert (new_decl);
6000 	  *new_debug_args = vec_safe_copy (*old_debug_args);
6001 	}
6002     }
6003 
6004   /* Output the inlining info for this abstract function, since it has been
6005      inlined.  If we don't do this now, we can lose the information about the
6006      variables in the function when the blocks get blown away as soon as we
6007      remove the cgraph node.  */
6008   (*debug_hooks->outlining_inline_function) (old_decl);
6009 
6010   DECL_ARTIFICIAL (new_decl) = 1;
6011   DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6012   if (DECL_ORIGIN (old_decl) == old_decl)
6013     old_version_node->used_as_abstract_origin = true;
6014   DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6015 
6016   /* Prepare the data structures for the tree copy.  */
6017   memset (&id, 0, sizeof (id));
6018 
6019   /* Collect statements that will need folding after the body is copied.  */
6020   id.statements_to_fold = new hash_set<gimple *>;
6021 
6022   id.decl_map = new hash_map<tree, tree>;
6023   id.debug_map = NULL;
6024   id.src_fn = old_decl;
6025   id.dst_fn = new_decl;
6026   id.src_node = old_version_node;
6027   id.dst_node = new_version_node;
6028   id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6029   id.blocks_to_copy = blocks_to_copy;
6030 
6031   id.copy_decl = copy_decl_no_change;
6032   id.transform_call_graph_edges
6033     = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6034   id.transform_new_cfg = true;
6035   id.transform_return_to_modify = false;
6036   id.transform_parameter = false;
6037   id.transform_lang_insert_block = NULL;
6038 
6039   old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6040     (DECL_STRUCT_FUNCTION (old_decl));
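  /* Share the old decl's RESULT and ARGUMENTS for now; both are replaced
     further below.  */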
6041   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6042   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6043   initialize_cfun (new_decl, old_decl,
6044 		   new_entry ? new_entry->count : old_entry_block->count);
6045   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6046     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6047       = id.src_cfun->gimple_df->ipa_pta;
6048 
6049   /* Copy the function's static chain.  */
6050   p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6051   if (p)
6052     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6053       = copy_static_chain (p, &id);
6054 
6055   /* If there's a tree_map, prepare for substitution.  */
6056   if (tree_map)
6057     for (i = 0; i < tree_map->length (); i++)
6058       {
6059 	gimple *init;
6060 	replace_info = (*tree_map)[i];
6061 	if (replace_info->replace_p)
6062 	  {
6063 	    int parm_num = -1;
6064 	    if (!replace_info->old_tree)
6065 	      {
6066 		int p = replace_info->parm_num;
6067 		tree parm;
6068 		tree req_type, new_type;
6069 
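		/* The replacement identifies the parameter only by index;
		   walk the argument chain to find the matching PARM_DECL.  */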
6070 		for (parm = DECL_ARGUMENTS (old_decl); p;
6071 		     parm = DECL_CHAIN (parm))
6072 		  p--;
6073 		replace_info->old_tree = parm;
6074 		parm_num = replace_info->parm_num;
6075 		req_type = TREE_TYPE (parm);
6076 		new_type = TREE_TYPE (replace_info->new_tree);
6077 		if (!useless_type_conversion_p (req_type, new_type))
6078 		  {
6079 		    if (fold_convertible_p (req_type, replace_info->new_tree))
6080 		      replace_info->new_tree
6081 			= fold_build1 (NOP_EXPR, req_type,
6082 				       replace_info->new_tree);
6083 		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6084 		      replace_info->new_tree
6085 			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
6086 				       replace_info->new_tree);
6087 		    else
6088 		      {
6089 			if (dump_file)
6090 			  {
6091 			    fprintf (dump_file, "    const ");
6092 			    print_generic_expr (dump_file,
6093 						replace_info->new_tree);
6094 			    fprintf (dump_file,
6095 				     "  can't be converted to param ");
6096 			    print_generic_expr (dump_file, parm);
6097 			    fprintf (dump_file, "\n");
6098 			  }
6099 			replace_info->old_tree = NULL;
6100 		      }
6101 		  }
6102 	      }
6103 	    else
6104 	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6105 	    if (replace_info->old_tree)
6106 	      {
6107 		init = setup_one_parameter (&id, replace_info->old_tree,
6108 					    replace_info->new_tree, id.src_fn,
6109 					    NULL,
6110 					    &vars);
6111 		if (init)
6112 		  init_stmts.safe_push (init);
6113 		if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
6114 		  {
6115 		    if (parm_num == -1)
6116 		      {
6117 			tree parm;
6118 			int p;
6119 			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6120 			     parm = DECL_CHAIN (parm), p++)
6121 			  if (parm == replace_info->old_tree)
6122 			    {
6123 			      parm_num = p;
6124 			      break;
6125 			    }
6126 		      }
6127 		    if (parm_num != -1)
6128 		      {
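			/* Copy ARGS_TO_SKIP lazily the first time the debug
			   variant needs to diverge from it.  */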
6129 			if (debug_args_to_skip == args_to_skip)
6130 			  {
6131 			    debug_args_to_skip = BITMAP_ALLOC (NULL);
6132 			    bitmap_copy (debug_args_to_skip, args_to_skip);
6133 			  }
6134 			bitmap_clear_bit (debug_args_to_skip, parm_num);
6135 		      }
6136 		  }
6137 	      }
6138 	  }
6139       }
6140   /* Copy the function's arguments.  */
6141   if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6142     DECL_ARGUMENTS (new_decl)
6143       = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6144 				       args_to_skip, &vars);
6145 
6146   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6147   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6148 
6149   declare_inline_vars (DECL_INITIAL (new_decl), vars);
6150 
6151   if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6152     /* Add local vars.  */
6153     add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6154 
6155   if (DECL_RESULT (old_decl) == NULL_TREE)
6156     ;
6157   else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6158     {
6159       DECL_RESULT (new_decl)
6160 	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6161 		      RESULT_DECL, NULL_TREE, void_type_node);
6162       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6163       cfun->returns_struct = 0;
6164       cfun->returns_pcc_struct = 0;
6165     }
6166   else
6167     {
6168       tree old_name;
6169       DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6170       lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6171       if (gimple_in_ssa_p (id.src_cfun)
6172 	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6173 	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6174 	{
6175 	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6176 	  insert_decl_map (&id, old_name, new_name);
6177 	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6178 	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6179 	}
6180     }
6181 
6182   /* Set up the destination function's loop tree.  */
6183   if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6184     {
6185       cfun->curr_properties &= ~PROP_loops;
6186       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6187       cfun->curr_properties |= PROP_loops;
6188     }
6189 
6190   /* Copy the function's body.  */
6191   copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6192 	     new_entry);
6193 
6194   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6195   number_blocks (new_decl);
6196 
6197   /* We want to create the BB unconditionally, so that the addition of
6198      debug stmts doesn't affect BB count, which may in the end cause
6199      codegen differences.  */
6200   bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6201   while (init_stmts.length ())
6202     insert_init_stmt (&id, bb, init_stmts.pop ());
6203   update_clone_info (&id);
6204 
6205   /* Remap the nonlocal_goto_save_area, if any.  */
6206   if (cfun->nonlocal_goto_save_area)
6207     {
6208       struct walk_stmt_info wi;
6209 
6210       memset (&wi, 0, sizeof (wi));
6211       wi.info = &id;
6212       walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6213     }
6214 
6215   /* Clean up.  */
6216   delete id.decl_map;
6217   if (id.debug_map)
6218     delete id.debug_map;
6219   free_dominance_info (CDI_DOMINATORS);
6220   free_dominance_info (CDI_POST_DOMINATORS);
6221 
6222   update_max_bb_count ();
6223   fold_marked_statements (0, id.statements_to_fold);
6224   delete id.statements_to_fold;
6225   delete_unreachable_blocks_update_callgraph (&id);
6226   if (id.dst_node->definition)
6227     cgraph_edge::rebuild_references ();
6228   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6229     {
6230       calculate_dominance_info (CDI_DOMINATORS);
6231       fix_loop_structure (NULL);
6232     }
6233   update_ssa (TODO_update_ssa);
6234 
6235   /* After partial cloning we need to rescale frequencies, so they are
6236      within proper range in the cloned function.  */
6237   if (new_entry)
6238     {
6239       struct cgraph_edge *e;
6240       rebuild_frequencies ();
6241 
6242       new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6243       for (e = new_version_node->callees; e; e = e->next_callee)
6244 	{
6245 	  basic_block bb = gimple_bb (e->call_stmt);
6246 	  e->count = bb->count;
6247 	}
6248       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6249 	{
6250 	  basic_block bb = gimple_bb (e->call_stmt);
6251 	  e->count = bb->count;
6252 	}
6253     }
6254 
6255   if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6256     {
6257       tree parm;
6258       vec<tree, va_gc> **debug_args = NULL;
6259       unsigned int len = 0;
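      /* For every removed register parameter, create a DEBUG_EXPR_DECL and
	 record the (DECL_ORIGIN, DEBUG_EXPR_DECL) pair in NEW_DECL's debug
	 args vector.  */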
6260       for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6261 	   parm; parm = DECL_CHAIN (parm), i++)
6262 	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6263 	  {
6264 	    tree ddecl;
6265 
6266 	    if (debug_args == NULL)
6267 	      {
6268 		debug_args = decl_debug_args_insert (new_decl);
6269 		len = vec_safe_length (*debug_args);
6270 	      }
6271 	    ddecl = make_node (DEBUG_EXPR_DECL);
6272 	    DECL_ARTIFICIAL (ddecl) = 1;
6273 	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
6274 	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
6275 	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6276 	    vec_safe_push (*debug_args, ddecl);
6277 	  }
6278       if (debug_args != NULL)
6279 	{
6280 	  /* On the callee side, add
6281 	     DEBUG D#Y s=> parm
6282 	     DEBUG var => D#Y
6283 	     stmts to the first bb where var is a VAR_DECL created for the
6284 	     optimized-away parameter in the DECL_INITIAL block.  This hints
6285 	     in the debug info that var (whose DECL_ORIGIN is the parm
6286 	     PARM_DECL) is optimized away, but its value could be looked up
6287 	     at the call site as the value of D#X there.  */
6288 	  tree var = vars, vexpr;
6289 	  gimple_stmt_iterator cgsi
6290 	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6291 	  gimple *def_temp;
6292 	  var = vars;
6293 	  i = vec_safe_length (*debug_args);
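	  /* DEBUG_ARGS holds (DECL_ORIGIN, DEBUG_EXPR_DECL) pairs; walk the
	     pairs appended above, from the end of the vector down to its
	     previous length LEN, and match each one with the VAR_DECL that
	     was created for the removed parameter in VARS.  */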
6294 	  do
6295 	    {
6296 	      i -= 2;
6297 	      while (var != NULL_TREE
6298 		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6299 		var = TREE_CHAIN (var);
6300 	      if (var == NULL_TREE)
6301 		break;
6302 	      vexpr = make_node (DEBUG_EXPR_DECL);
6303 	      parm = (**debug_args)[i];
6304 	      DECL_ARTIFICIAL (vexpr) = 1;
6305 	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
6306 	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
6307 	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6308 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6309 	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6310 	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6311 	    }
6312 	  while (i > len);
6313 	}
6314     }
6315 
6316   if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6317     BITMAP_FREE (debug_args_to_skip);
6318   free_dominance_info (CDI_DOMINATORS);
6319   free_dominance_info (CDI_POST_DOMINATORS);
6320 
6321   gcc_assert (!id.debug_stmts.exists ());
6322   pop_cfun ();
6323   return;
6324 }
6325 
6326 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6327    the callee and return the inlined body on success.  */
6328 
6329 tree
6330 maybe_inline_call_in_expr (tree exp)
6331 {
6332   tree fn = get_callee_fndecl (exp);
6333 
6334   /* We can only try to inline "const" functions.  */
6335   if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6336     {
6337       call_expr_arg_iterator iter;
6338       copy_body_data id;
6339       tree param, arg, t;
6340       hash_map<tree, tree> decl_map;
6341 
6342       /* Remap the parameters.  */
6343       for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6344 	   param;
6345 	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6346 	decl_map.put (param, arg);
6347 
6348       memset (&id, 0, sizeof (id));
6349       id.src_fn = fn;
6350       id.dst_fn = current_function_decl;
6351       id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6352       id.decl_map = &decl_map;
6353 
6354       id.copy_decl = copy_decl_no_change;
6355       id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6356       id.transform_new_cfg = false;
6357       id.transform_return_to_modify = true;
6358       id.transform_parameter = true;
6359       id.transform_lang_insert_block = NULL;
6360 
6361       /* Make sure not to unshare trees behind the front-end's back
6362 	 since front-end specific mechanisms may rely on sharing.  */
6363       id.regimplify = false;
6364       id.do_not_unshare = true;
6365 
6366       /* We're not inside any EH region.  */
6367       id.eh_lp_nr = 0;
6368 
6369       t = copy_tree_body (&id);
6370 
6371       /* We can only return something suitable for use in a GENERIC
6372 	 expression tree.  */
6373       if (TREE_CODE (t) == MODIFY_EXPR)
6374 	return TREE_OPERAND (t, 1);
6375     }
6376 
6377   return NULL_TREE;
6378 }
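
/* A minimal usage sketch (illustrative only; the caller context and the
   variable names below are hypothetical, not part of this file): a front
   end folding a GENERIC call to a "const" function with a saved body
   could try

       tree folded = maybe_inline_call_in_expr (call_expr);
       if (folded != NULL_TREE)
	 call_expr = folded;

   and keep the original CALL_EXPR whenever NULL_TREE is returned.  */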
6379 
6380 /* Duplicate a type, fields and all.  */
6381 
6382 tree
6383 build_duplicate_type (tree type)
6384 {
6385   struct copy_body_data id;
6386 
6387   memset (&id, 0, sizeof (id));
6388   id.src_fn = current_function_decl;
6389   id.dst_fn = current_function_decl;
6390   id.src_cfun = cfun;
6391   id.decl_map = new hash_map<tree, tree>;
6392   id.debug_map = NULL;
6393   id.copy_decl = copy_decl_no_change;
6394 
6395   type = remap_type_1 (type, &id);
6396 
6397   delete id.decl_map;
6398   if (id.debug_map)
6399     delete id.debug_map;
6400 
6401   TYPE_CANONICAL (type) = type;
6402 
6403   return type;
6404 }
6405 
6406 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6407    parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
6408    evaluation.  */
6409 
6410 tree
6411 copy_fn (tree fn, tree& parms, tree& result)
6412 {
6413   copy_body_data id;
6414   tree param;
6415   hash_map<tree, tree> decl_map;
6416 
6417   tree *p = &parms;
6418   *p = NULL_TREE;
6419 
6420   memset (&id, 0, sizeof (id));
6421   id.src_fn = fn;
6422   id.dst_fn = current_function_decl;
6423   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6424   id.decl_map = &decl_map;
6425 
6426   id.copy_decl = copy_decl_no_change;
6427   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6428   id.transform_new_cfg = false;
6429   id.transform_return_to_modify = false;
6430   id.transform_parameter = true;
6431   id.transform_lang_insert_block = NULL;
6432 
6433   /* Make sure not to unshare trees behind the front-end's back
6434      since front-end specific mechanisms may rely on sharing.  */
6435   id.regimplify = false;
6436   id.do_not_unshare = true;
6437 
6438   /* We're not inside any EH region.  */
6439   id.eh_lp_nr = 0;
6440 
6441   /* Remap the parameters and result and return them to the caller.  */
6442   for (param = DECL_ARGUMENTS (fn);
6443        param;
6444        param = DECL_CHAIN (param))
6445     {
6446       *p = remap_decl (param, &id);
6447       p = &DECL_CHAIN (*p);
6448     }
6449 
6450   if (DECL_RESULT (fn))
6451     result = remap_decl (DECL_RESULT (fn), &id);
6452   else
6453     result = NULL_TREE;
6454 
6455   return copy_tree_body (&id);
6456 }
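
/* A minimal usage sketch (illustrative only; FNDECL and the other names
   are hypothetical): a caller such as the C++ constexpr evaluator can
   obtain an unshared body together with the remapped parameters and
   result like so:

       tree parms, result;
       tree body = copy_fn (fndecl, parms, result);

   Afterwards PARMS chains the remapped PARM_DECL copies, RESULT is the
   remapped RESULT_DECL (or NULL_TREE if FNDECL has none), and BODY is
   the unshared copy of DECL_SAVED_TREE (fndecl).  */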
6457